repo_id | file_path | content | __index_level_0__
---|---|---|---|
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_flow_runtimes_workspace_independent_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._flow_runtimes_workspace_independent_operations import build_get_runtime_latest_config_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class FlowRuntimesWorkspaceIndependentOperations:
"""FlowRuntimesWorkspaceIndependentOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def get_runtime_latest_config(
self,
**kwargs: Any
) -> "_models.RuntimeConfiguration":
"""get_runtime_latest_config.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RuntimeConfiguration, or the result of cls(response)
:rtype: ~flow.models.RuntimeConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RuntimeConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_runtime_latest_config_request(
template_url=self.get_runtime_latest_config.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('RuntimeConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_runtime_latest_config.metadata = {'url': '/flow/api/runtimes/latestConfig'} # type: ignore
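
# ---------------------------------------------------------------------------
# Editor's note: a minimal usage sketch, not part of the AutoRest output.
# It assumes the generated async service client exposes this operation group
# as an attribute named ``flow_runtimes_workspace_independent`` (a hypothetical
# name; check the generated client for the real attribute).
async def _example_get_latest_runtime_config(client) -> None:
    # ``client`` is an already-constructed generated async client.
    config = await client.flow_runtimes_workspace_independent.get_runtime_latest_config()
    print(config)  # a deserialized _models.RuntimeConfiguration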
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_connections_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._connections_operations import build_create_connection_request, build_delete_connection_request, build_get_connection_request, build_get_connection_with_secrets_request, build_list_azure_open_ai_deployments_request, build_list_connection_specs_request, build_list_connections_request, build_update_connection_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ConnectionsOperations:
"""ConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
body: Optional["_models.CreateOrUpdateConnectionRequestDto"] = None,
**kwargs: Any
) -> "_models.ConnectionDto":
"""create_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:param body:
:type body: ~flow.models.CreateOrUpdateConnectionRequestDto
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionDto, or the result of cls(response)
:rtype: ~flow.models.ConnectionDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'CreateOrUpdateConnectionRequestDto')
else:
_json = None
request = build_create_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
content_type=content_type,
json=_json,
template_url=self.create_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore
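
# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch of the ``cls`` hook described in the
# docstring above; not part of the AutoRest output. The hook is called as
# ``cls(pipeline_response, deserialized, {})``, so it receives the raw
# pipeline response, the deserialized model, and a headers dict (always ``{}``
# in this generated code); its return value replaces the normal return value.
async def _example_create_connection_with_cls(ops: "ConnectionsOperations") -> None:
    def keep_status(pipeline_response, deserialized, headers):
        # Return both the HTTP status code and the ConnectionDto.
        return pipeline_response.http_response.status_code, deserialized

    status, dto = await ops.create_connection(
        subscription_id="<subscription-id>",      # placeholder values
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        connection_name="<connection-name>",
        body=None,  # a CreateOrUpdateConnectionRequestDto could be passed here
        cls=keep_status,
    )
    print(status, dto)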
@distributed_trace_async
async def update_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
body: Optional["_models.CreateOrUpdateConnectionRequestDto"] = None,
**kwargs: Any
) -> "_models.ConnectionDto":
"""update_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:param body:
:type body: ~flow.models.CreateOrUpdateConnectionRequestDto
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionDto, or the result of cls(response)
:rtype: ~flow.models.ConnectionDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'CreateOrUpdateConnectionRequestDto')
else:
_json = None
request = build_update_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
content_type=content_type,
json=_json,
template_url=self.update_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore
@distributed_trace_async
async def get_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
**kwargs: Any
) -> "_models.ConnectionDto":
"""get_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionDto, or the result of cls(response)
:rtype: ~flow.models.ConnectionDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
template_url=self.get_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore
@distributed_trace_async
async def delete_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
**kwargs: Any
) -> "_models.ConnectionDto":
"""delete_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionDto, or the result of cls(response)
:rtype: ~flow.models.ConnectionDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
template_url=self.delete_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}'} # type: ignore
@distributed_trace_async
async def get_connection_with_secrets(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
**kwargs: Any
) -> "_models.ConnectionDto":
"""get_connection_with_secrets.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionDto, or the result of cls(response)
:rtype: ~flow.models.ConnectionDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_connection_with_secrets_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
template_url=self.get_connection_with_secrets.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_connection_with_secrets.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/listsecrets'} # type: ignore
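
# ---------------------------------------------------------------------------
# Editor's note: an illustrative error-handling sketch, not AutoRest output.
# On non-200 responses the method raises HttpResponseError with the
# failsafe-deserialized ErrorResponse attached as ``error.model`` (see above).
async def _example_get_secrets_safely(ops: "ConnectionsOperations") -> None:
    from azure.core.exceptions import HttpResponseError

    try:
        dto = await ops.get_connection_with_secrets(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            connection_name="<connection-name>",
        )
        print(dto)
    except HttpResponseError as err:
        # ``err.model`` holds the _models.ErrorResponse, which may be None
        # if the error body could not be parsed.
        print(err.status_code, err.model)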
@distributed_trace_async
async def list_connections(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
**kwargs: Any
) -> List["_models.ConnectionDto"]:
"""list_connections.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of ConnectionDto, or the result of cls(response)
:rtype: list[~flow.models.ConnectionDto]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionDto"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_connections_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.list_connections.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[ConnectionDto]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_connections.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections'} # type: ignore
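
# ---------------------------------------------------------------------------
# Editor's note: a small listing sketch, not AutoRest output. The field name
# read off each ConnectionDto (``connection_name``) is an assumption; inspect
# _models.ConnectionDto for the real attributes.
async def _example_list_connections(ops: "ConnectionsOperations") -> None:
    connections = await ops.list_connections(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
    )
    for dto in connections:
        print(getattr(dto, "connection_name", dto))  # hedged attribute access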
@distributed_trace_async
async def list_connection_specs(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
**kwargs: Any
) -> List["_models.WorkspaceConnectionSpec"]:
"""list_connection_specs.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of WorkspaceConnectionSpec, or the result of cls(response)
:rtype: list[~flow.models.WorkspaceConnectionSpec]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.WorkspaceConnectionSpec"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_connection_specs_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.list_connection_specs.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[WorkspaceConnectionSpec]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_connection_specs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/specs'} # type: ignore
@distributed_trace_async
async def list_azure_open_ai_deployments(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
**kwargs: Any
) -> List["_models.AzureOpenAIDeploymentDto"]:
"""list_azure_open_ai_deployments.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of AzureOpenAIDeploymentDto, or the result of cls(response)
:rtype: list[~flow.models.AzureOpenAIDeploymentDto]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.AzureOpenAIDeploymentDto"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_azure_open_ai_deployments_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
template_url=self.list_azure_open_ai_deployments.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[AzureOpenAIDeploymentDto]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_azure_open_ai_deployments.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connections/{connectionName}/AzureOpenAIDeployments'} # type: ignore
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_connection_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._connection_operations import build_create_connection_request, build_delete_connection_request, build_get_connection_request, build_list_connection_specs_request, build_list_connections_request, build_update_connection_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ConnectionOperations:
"""ConnectionOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
body: Optional["_models.CreateOrUpdateConnectionRequest"] = None,
**kwargs: Any
) -> "_models.ConnectionEntity":
"""create_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:param body:
:type body: ~flow.models.CreateOrUpdateConnectionRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionEntity, or the result of cls(response)
:rtype: ~flow.models.ConnectionEntity
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'CreateOrUpdateConnectionRequest')
else:
_json = None
request = build_create_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
content_type=content_type,
json=_json,
template_url=self.create_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionEntity', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore
@distributed_trace_async
async def update_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
body: Optional["_models.CreateOrUpdateConnectionRequest"] = None,
**kwargs: Any
) -> "_models.ConnectionEntity":
"""update_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:param body:
:type body: ~flow.models.CreateOrUpdateConnectionRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionEntity, or the result of cls(response)
:rtype: ~flow.models.ConnectionEntity
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'CreateOrUpdateConnectionRequest')
else:
_json = None
request = build_update_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
content_type=content_type,
json=_json,
template_url=self.update_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionEntity', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore
@distributed_trace_async
async def get_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
**kwargs: Any
) -> "_models.ConnectionEntity":
"""get_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionEntity, or the result of cls(response)
:rtype: ~flow.models.ConnectionEntity
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
template_url=self.get_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionEntity', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore
@distributed_trace_async
async def delete_connection(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
connection_name: str,
connection_scope: Optional[Union[str, "_models.ConnectionScope"]] = None,
**kwargs: Any
) -> "_models.ConnectionEntity":
"""delete_connection.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param connection_name:
:type connection_name: str
:param connection_scope:
:type connection_scope: str or ~flow.models.ConnectionScope
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionEntity, or the result of cls(response)
:rtype: ~flow.models.ConnectionEntity
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionEntity"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_connection_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
connection_name=connection_name,
connection_scope=connection_scope,
template_url=self.delete_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('ConnectionEntity', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete_connection.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/{connectionName}'} # type: ignore
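
# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch, not AutoRest output. Per the
# signature above, ``connection_scope`` accepts either a plain string or a
# _models.ConnectionScope enum member; the value shown here is hypothetical.
async def _example_delete_scoped_connection(ops: "ConnectionOperations") -> None:
    entity = await ops.delete_connection(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        connection_name="<connection-name>",
        connection_scope="WorkspaceScoped",  # hypothetical enum value
    )
    print(entity)  # the deleted ConnectionEntity, per the return annotation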
@distributed_trace_async
async def list_connections(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
**kwargs: Any
) -> List["_models.ConnectionEntity"]:
"""list_connections.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of ConnectionEntity, or the result of cls(response)
:rtype: list[~flow.models.ConnectionEntity]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionEntity"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_connections_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.list_connections.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[ConnectionEntity]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_connections.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection'} # type: ignore
@distributed_trace_async
async def list_connection_specs(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
**kwargs: Any
) -> List["_models.ConnectionSpec"]:
"""list_connection_specs.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of ConnectionSpec, or the result of cls(response)
:rtype: list[~flow.models.ConnectionSpec]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.ConnectionSpec"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_connection_specs_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
template_url=self.list_connection_specs.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[ConnectionSpec]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_connection_specs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/Connection/specs'} # type: ignore
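
# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch of the ``error_map`` override that
# every method above supports via ``kwargs.pop('error_map', {})``; not part
# of the AutoRest output.
async def _example_custom_error_map(ops: "ConnectionOperations") -> None:
    from azure.core.exceptions import ResourceNotFoundError

    try:
        await ops.get_connection(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            connection_name="<missing-connection>",
            # Map 404 explicitly so it surfaces as ResourceNotFoundError
            # (already the default mapping; shown here for illustration).
            error_map={404: ResourceNotFoundError},
        )
    except ResourceNotFoundError:
        print("connection does not exist")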
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/aio/operations/_bulk_runs_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._bulk_runs_operations import build_cancel_flow_run_request, build_clone_flow_from_flow_run_request, build_get_flow_child_runs_request, build_get_flow_node_run_base_path_request, build_get_flow_node_runs_request, build_get_flow_run_info_request, build_get_flow_run_log_content_request, build_submit_bulk_run_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BulkRunsOperations:
"""BulkRunsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~flow.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def submit_bulk_run(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
body: Optional["_models.SubmitBulkRunRequest"] = None,
**kwargs: Any
) -> str:
"""submit_bulk_run.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param body:
:type body: ~flow.models.SubmitBulkRunRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'SubmitBulkRunRequest')
else:
_json = None
request = build_submit_bulk_run_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
content_type=content_type,
json=_json,
template_url=self.submit_bulk_run.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('str', pipeline_response)
if response.status_code == 204:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
submit_bulk_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/submit'} # type: ignore
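
# ---------------------------------------------------------------------------
# Editor's note: a minimal submission sketch, not AutoRest output. The fields
# of SubmitBulkRunRequest are not shown in this file, so the body is left as
# None here; a populated _models.SubmitBulkRunRequest would normally be passed.
async def _example_submit_bulk_run(ops: "BulkRunsOperations") -> str:
    run_id = await ops.submit_bulk_run(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        body=None,  # replace with a real SubmitBulkRunRequest
    )
    return run_id  # a string, presumably the flow run id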
@distributed_trace_async
async def cancel_flow_run(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
**kwargs: Any
) -> str:
"""cancel_flow_run.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_cancel_flow_run_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
template_url=self.cancel_flow_run.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
cancel_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/cancel'} # type: ignore
@distributed_trace_async
async def clone_flow_from_flow_run(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
body: Optional["_models.CreateFlowRequest"] = None,
**kwargs: Any
) -> "_models.FlowDto":
"""clone_flow_from_flow_run.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:param body:
:type body: ~flow.models.CreateFlowRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FlowDto, or the result of cls(response)
:rtype: ~flow.models.FlowDto
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowDto"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
if body is not None:
_json = self._serialize.body(body, 'CreateFlowRequest')
else:
_json = None
request = build_clone_flow_from_flow_run_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
content_type=content_type,
json=_json,
template_url=self.clone_flow_from_flow_run.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('FlowDto', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
clone_flow_from_flow_run.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/clone'} # type: ignore
@distributed_trace_async
async def get_flow_run_info(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
**kwargs: Any
) -> "_models.FlowRunInfo":
"""get_flow_run_info.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FlowRunInfo, or the result of cls(response)
:rtype: ~flow.models.FlowRunInfo
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunInfo"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_flow_run_info_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
template_url=self.get_flow_run_info.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('FlowRunInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_flow_run_info.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}'} # type: ignore
@distributed_trace_async
async def get_flow_child_runs(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
index: Optional[int] = None,
start_index: Optional[int] = None,
end_index: Optional[int] = None,
**kwargs: Any
) -> List[Any]:
"""get_flow_child_runs.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:param index:
:type index: int
:param start_index:
:type start_index: int
:param end_index:
:type end_index: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of any, or the result of cls(response)
:rtype: list[any]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[Any]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_flow_child_runs_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
index=index,
start_index=start_index,
end_index=end_index,
template_url=self.get_flow_child_runs.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[object]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_flow_child_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/childRuns'} # type: ignore
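
# ---------------------------------------------------------------------------
# Editor's note: an illustrative paging sketch, not AutoRest output. It uses
# the ``start_index``/``end_index`` window parameters documented above to
# fetch child runs in chunks; the window semantics (inclusive bounds) are an
# assumption.
async def _example_iter_child_runs(ops: "BulkRunsOperations", flow_run_id: str):
    window = 25
    start = 0
    while True:
        batch = await ops.get_flow_child_runs(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            flow_run_id=flow_run_id,
            start_index=start,
            end_index=start + window - 1,
        )
        if not batch:
            break
        for child in batch:
            yield child
        start += window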
@distributed_trace_async
async def get_flow_node_runs(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
node_name: str,
index: Optional[int] = None,
start_index: Optional[int] = None,
end_index: Optional[int] = None,
aggregation: Optional[bool] = False,
**kwargs: Any
) -> List[Any]:
"""get_flow_node_runs.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:param node_name:
:type node_name: str
:param index:
:type index: int
:param start_index:
:type start_index: int
:param end_index:
:type end_index: int
:param aggregation:
:type aggregation: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of any, or the result of cls(response)
:rtype: list[any]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[Any]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_flow_node_runs_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
node_name=node_name,
index=index,
start_index=start_index,
end_index=end_index,
aggregation=aggregation,
template_url=self.get_flow_node_runs.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('[object]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_flow_node_runs.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}'} # type: ignore
@distributed_trace_async
async def get_flow_node_run_base_path(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
node_name: str,
**kwargs: Any
) -> "_models.FlowRunBasePath":
"""get_flow_node_run_base_path.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:param node_name:
:type node_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FlowRunBasePath, or the result of cls(response)
:rtype: ~flow.models.FlowRunBasePath
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowRunBasePath"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_flow_node_run_base_path_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
node_name=node_name,
template_url=self.get_flow_node_run_base_path.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('FlowRunBasePath', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_flow_node_run_base_path.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/nodeRuns/{nodeName}/basePath'} # type: ignore
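
    # A minimal usage sketch (illustration only): with the same hypothetical
    # `client` as above, the storage base path of a node run could be
    # resolved like so:
    #
    #     base_path = await client.flow_runs_admin.get_flow_node_run_base_path(
    #         subscription_id="<subscription-id>",
    #         resource_group_name="<resource-group>",
    #         workspace_name="<workspace>",
    #         flow_run_id="<flow-run-id>",
    #         node_name="<node-name>",
    #     )
    #     # `base_path` is deserialized as a ~flow.models.FlowRunBasePath.
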
@distributed_trace_async
async def get_flow_run_log_content(
self,
subscription_id: str,
resource_group_name: str,
workspace_name: str,
flow_run_id: str,
**kwargs: Any
) -> str:
"""get_flow_run_log_content.
:param subscription_id: The Azure Subscription ID.
:type subscription_id: str
:param resource_group_name: The Name of the resource group in which the workspace is located.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param flow_run_id:
:type flow_run_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_flow_run_log_content_request(
subscription_id=subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
flow_run_id=flow_run_id,
template_url=self.get_flow_run_log_content.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_flow_run_log_content.metadata = {'url': '/flow/api/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/BulkRuns/{flowRunId}/logContent'} # type: ignore
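
    # A minimal usage sketch (illustration only): with the same hypothetical
    # `client` as above, the full log text of a bulk run could be retrieved
    # and printed like so:
    #
    #     log_text = await client.flow_runs_admin.get_flow_run_log_content(
    #         subscription_id="<subscription-id>",
    #         resource_group_name="<resource-group>",
    #         workspace_name="<workspace>",
    #         flow_run_id="<flow-run-id>",
    #     )
    #     print(log_text)  # the operation deserializes the response as `str`
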

# --------------------------------------------------------------------------
# File: promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/__init__.py
# --------------------------------------------------------------------------

# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
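# The block below prefers the typed model definitions in `._models_py3`,
# which use syntax that is only valid on Python 3 (type annotations and
# keyword-only arguments); if importing them fails with a SyntaxError or
# ImportError, the untyped equivalents in `._models` are used instead. This
# is the standard AutoRest fallback pattern for generated model packages.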
try:
from ._models_py3 import ACIAdvanceSettings
from ._models_py3 import AEVAComputeConfiguration
from ._models_py3 import AEVAResourceConfiguration
from ._models_py3 import AISuperComputerConfiguration
from ._models_py3 import AISuperComputerScalePolicy
from ._models_py3 import AISuperComputerStorageReferenceConfiguration
from ._models_py3 import AKSAdvanceSettings
from ._models_py3 import AKSReplicaStatus
from ._models_py3 import AMLComputeConfiguration
from ._models_py3 import APCloudConfiguration
from ._models_py3 import Activate
from ._models_py3 import AdditionalErrorInfo
from ._models_py3 import AdhocTriggerScheduledCommandJobRequest
from ._models_py3 import AdhocTriggerScheduledSparkJobRequest
from ._models_py3 import AetherAPCloudConfiguration
from ._models_py3 import AetherAmlDataset
from ._models_py3 import AetherAmlSparkCloudSetting
from ._models_py3 import AetherArgumentAssignment
from ._models_py3 import AetherAssetDefinition
from ._models_py3 import AetherAssetOutputSettings
from ._models_py3 import AetherAutoFeaturizeConfiguration
from ._models_py3 import AetherAutoMLComponentConfiguration
from ._models_py3 import AetherAutoTrainConfiguration
from ._models_py3 import AetherAzureBlobReference
from ._models_py3 import AetherAzureDataLakeGen2Reference
from ._models_py3 import AetherAzureDataLakeReference
from ._models_py3 import AetherAzureDatabaseReference
from ._models_py3 import AetherAzureFilesReference
from ._models_py3 import AetherBatchAiComputeInfo
from ._models_py3 import AetherBuildArtifactInfo
from ._models_py3 import AetherCloudBuildDropPathInfo
from ._models_py3 import AetherCloudBuildInfo
from ._models_py3 import AetherCloudBuildQueueInfo
from ._models_py3 import AetherCloudPrioritySetting
from ._models_py3 import AetherCloudSettings
from ._models_py3 import AetherColumnTransformer
from ._models_py3 import AetherComputeConfiguration
from ._models_py3 import AetherComputeSetting
from ._models_py3 import AetherControlInput
from ._models_py3 import AetherControlOutput
from ._models_py3 import AetherCopyDataTask
from ._models_py3 import AetherCosmosReference
from ._models_py3 import AetherCreatedBy
from ._models_py3 import AetherCustomReference
from ._models_py3 import AetherDBFSReference
from ._models_py3 import AetherDataLocation
from ._models_py3 import AetherDataLocationReuseCalculationFields
from ._models_py3 import AetherDataPath
from ._models_py3 import AetherDataReference
from ._models_py3 import AetherDataSetDefinition
from ._models_py3 import AetherDataSetDefinitionValue
from ._models_py3 import AetherDataSettings
from ._models_py3 import AetherDataTransferCloudConfiguration
from ._models_py3 import AetherDataTransferSink
from ._models_py3 import AetherDataTransferSource
from ._models_py3 import AetherDataTransferV2CloudSetting
from ._models_py3 import AetherDatabaseSink
from ._models_py3 import AetherDatabaseSource
from ._models_py3 import AetherDatabricksComputeInfo
from ._models_py3 import AetherDatasetOutput
from ._models_py3 import AetherDatasetOutputOptions
from ._models_py3 import AetherDatasetRegistration
from ._models_py3 import AetherDatastoreSetting
from ._models_py3 import AetherDoWhileControlFlowInfo
from ._models_py3 import AetherDoWhileControlFlowRunSettings
from ._models_py3 import AetherDockerSettingConfiguration
from ._models_py3 import AetherEntityInterfaceDocumentation
from ._models_py3 import AetherEntrySetting
from ._models_py3 import AetherEnvironmentConfiguration
from ._models_py3 import AetherEsCloudConfiguration
from ._models_py3 import AetherExportDataTask
from ._models_py3 import AetherFeaturizationSettings
from ._models_py3 import AetherFileSystem
from ._models_py3 import AetherForecastHorizon
from ._models_py3 import AetherForecastingSettings
from ._models_py3 import AetherGeneralSettings
from ._models_py3 import AetherGlobsOptions
from ._models_py3 import AetherGraphControlNode
from ._models_py3 import AetherGraphControlReferenceNode
from ._models_py3 import AetherGraphDatasetNode
from ._models_py3 import AetherGraphEdge
from ._models_py3 import AetherGraphEntity
from ._models_py3 import AetherGraphModuleNode
from ._models_py3 import AetherGraphReferenceNode
from ._models_py3 import AetherHdfsReference
from ._models_py3 import AetherHdiClusterComputeInfo
from ._models_py3 import AetherHdiRunConfiguration
from ._models_py3 import AetherHyperDriveConfiguration
from ._models_py3 import AetherIdentitySetting
from ._models_py3 import AetherImportDataTask
from ._models_py3 import AetherInputSetting
from ._models_py3 import AetherInteractiveConfig
from ._models_py3 import AetherK8SConfiguration
from ._models_py3 import AetherLegacyDataPath
from ._models_py3 import AetherLimitSettings
from ._models_py3 import AetherMlcComputeInfo
from ._models_py3 import AetherModuleEntity
from ._models_py3 import AetherModuleExtendedProperties
from ._models_py3 import AetherNCrossValidations
from ._models_py3 import AetherOutputSetting
from ._models_py3 import AetherParallelForControlFlowInfo
from ._models_py3 import AetherParameterAssignment
from ._models_py3 import AetherPhillyHdfsReference
from ._models_py3 import AetherPortInfo
from ._models_py3 import AetherPriorityConfig
from ._models_py3 import AetherPriorityConfiguration
from ._models_py3 import AetherRegisteredDataSetReference
from ._models_py3 import AetherRemoteDockerComputeInfo
from ._models_py3 import AetherResourceAssignment
from ._models_py3 import AetherResourceAttributeAssignment
from ._models_py3 import AetherResourceAttributeDefinition
from ._models_py3 import AetherResourceConfig
from ._models_py3 import AetherResourceConfiguration
from ._models_py3 import AetherResourceModel
from ._models_py3 import AetherResourcesSetting
from ._models_py3 import AetherSavedDataSetReference
from ._models_py3 import AetherScopeCloudConfiguration
from ._models_py3 import AetherSeasonality
from ._models_py3 import AetherSqlDataPath
from ._models_py3 import AetherStackEnsembleSettings
from ._models_py3 import AetherStoredProcedureParameter
from ._models_py3 import AetherStructuredInterface
from ._models_py3 import AetherStructuredInterfaceInput
from ._models_py3 import AetherStructuredInterfaceOutput
from ._models_py3 import AetherStructuredInterfaceParameter
from ._models_py3 import AetherSubGraphConfiguration
from ._models_py3 import AetherSweepEarlyTerminationPolicy
from ._models_py3 import AetherSweepSettings
from ._models_py3 import AetherSweepSettingsLimits
from ._models_py3 import AetherTargetLags
from ._models_py3 import AetherTargetRollingWindowSize
from ._models_py3 import AetherTargetSelectorConfiguration
from ._models_py3 import AetherTestDataSettings
from ._models_py3 import AetherTorchDistributedConfiguration
from ._models_py3 import AetherTrainingOutput
from ._models_py3 import AetherTrainingSettings
from ._models_py3 import AetherUIAzureOpenAIDeploymentNameSelector
from ._models_py3 import AetherUIAzureOpenAIModelCapabilities
from ._models_py3 import AetherUIColumnPicker
from ._models_py3 import AetherUIJsonEditor
from ._models_py3 import AetherUIParameterHint
from ._models_py3 import AetherUIPromptFlowConnectionSelector
from ._models_py3 import AetherValidationDataSettings
from ._models_py3 import AetherVsoBuildArtifactInfo
from ._models_py3 import AetherVsoBuildDefinitionInfo
from ._models_py3 import AetherVsoBuildInfo
from ._models_py3 import AmlDataset
from ._models_py3 import AmlK8SConfiguration
from ._models_py3 import AmlK8SPriorityConfiguration
from ._models_py3 import AmlSparkCloudSetting
from ._models_py3 import ApiAndParameters
from ._models_py3 import ApplicationEndpointConfiguration
from ._models_py3 import ArgumentAssignment
from ._models_py3 import Asset
from ._models_py3 import AssetDefinition
from ._models_py3 import AssetNameAndVersionIdentifier
from ._models_py3 import AssetOutputSettings
from ._models_py3 import AssetOutputSettingsParameter
from ._models_py3 import AssetPublishResult
from ._models_py3 import AssetPublishSingleRegionResult
from ._models_py3 import AssetTypeMetaInfo
from ._models_py3 import AssetVersionPublishRequest
from ._models_py3 import AssignedUser
from ._models_py3 import AuthKeys
from ._models_py3 import AutoClusterComputeSpecification
from ._models_py3 import AutoDeleteSetting
from ._models_py3 import AutoFeaturizeConfiguration
from ._models_py3 import AutoMLComponentConfiguration
from ._models_py3 import AutoScaler
from ._models_py3 import AutoTrainConfiguration
from ._models_py3 import AutologgerSettings
from ._models_py3 import AvailabilityResponse
from ._models_py3 import AzureBlobReference
from ._models_py3 import AzureDataLakeGen2Reference
from ._models_py3 import AzureDataLakeReference
from ._models_py3 import AzureDatabaseReference
from ._models_py3 import AzureFilesReference
from ._models_py3 import AzureMLModuleVersionDescriptor
from ._models_py3 import AzureOpenAIDeploymentDto
from ._models_py3 import AzureOpenAIModelCapabilities
from ._models_py3 import BatchAiComputeInfo
from ._models_py3 import BatchDataInput
from ._models_py3 import BatchExportComponentSpecResponse
from ._models_py3 import BatchExportRawComponentResponse
from ._models_py3 import BatchGetComponentHashesRequest
from ._models_py3 import BatchGetComponentRequest
from ._models_py3 import Binding
from ._models_py3 import BulkTestDto
from ._models_py3 import CloudError
from ._models_py3 import CloudPrioritySetting
from ._models_py3 import CloudSettings
from ._models_py3 import ColumnTransformer
from ._models_py3 import CommandJob
from ._models_py3 import CommandJobLimits
from ._models_py3 import CommandReturnCodeConfig
from ._models_py3 import ComponentConfiguration
from ._models_py3 import ComponentInput
from ._models_py3 import ComponentJob
from ._models_py3 import ComponentJobInput
from ._models_py3 import ComponentJobOutput
from ._models_py3 import ComponentNameAndDefaultVersion
from ._models_py3 import ComponentNameMetaInfo
from ._models_py3 import ComponentOutput
from ._models_py3 import ComponentPreflightResult
from ._models_py3 import ComponentSpecMetaInfo
from ._models_py3 import ComponentUpdateRequest
from ._models_py3 import ComponentValidationRequest
from ._models_py3 import ComponentValidationResponse
from ._models_py3 import Compute
from ._models_py3 import ComputeConfiguration
from ._models_py3 import ComputeContract
from ._models_py3 import ComputeIdentityContract
from ._models_py3 import ComputeIdentityDto
from ._models_py3 import ComputeInfo
from ._models_py3 import ComputeProperties
from ._models_py3 import ComputeRPUserAssignedIdentity
from ._models_py3 import ComputeRequest
from ._models_py3 import ComputeSetting
from ._models_py3 import ComputeStatus
from ._models_py3 import ComputeStatusDetail
from ._models_py3 import ComputeWarning
from ._models_py3 import ConnectionConfigSpec
from ._models_py3 import ConnectionDto
from ._models_py3 import ConnectionEntity
from ._models_py3 import ConnectionOverrideSetting
from ._models_py3 import ConnectionSpec
from ._models_py3 import ContainerInstanceConfiguration
from ._models_py3 import ContainerRegistry
from ._models_py3 import ContainerResourceRequirements
from ._models_py3 import ControlInput
from ._models_py3 import ControlOutput
from ._models_py3 import CopyDataTask
from ._models_py3 import CreateFlowFromSampleRequest
from ._models_py3 import CreateFlowRequest
from ._models_py3 import CreateFlowRuntimeRequest
from ._models_py3 import CreateFlowSessionRequest
from ._models_py3 import CreateInferencePipelineRequest
from ._models_py3 import CreateOrUpdateConnectionRequest
from ._models_py3 import CreateOrUpdateConnectionRequestDto
from ._models_py3 import CreatePipelineDraftRequest
from ._models_py3 import CreatePipelineJobScheduleDto
from ._models_py3 import CreatePublishedPipelineRequest
from ._models_py3 import CreateRealTimeEndpointRequest
from ._models_py3 import CreatedBy
from ._models_py3 import CreatedFromDto
from ._models_py3 import CreationContext
from ._models_py3 import Cron
from ._models_py3 import CustomConnectionConfig
from ._models_py3 import CustomReference
from ._models_py3 import DBFSReference
from ._models_py3 import Data
from ._models_py3 import DataInfo
from ._models_py3 import DataLocation
from ._models_py3 import DataPath
from ._models_py3 import DataPathParameter
from ._models_py3 import DataPortDto
from ._models_py3 import DataReference
from ._models_py3 import DataReferenceConfiguration
from ._models_py3 import DataSetDefinition
from ._models_py3 import DataSetDefinitionValue
from ._models_py3 import DataSetPathParameter
from ._models_py3 import DataSettings
from ._models_py3 import DataTransferCloudConfiguration
from ._models_py3 import DataTransferSink
from ._models_py3 import DataTransferSource
from ._models_py3 import DataTransferV2CloudSetting
from ._models_py3 import DataTypeCreationInfo
from ._models_py3 import DatabaseSink
from ._models_py3 import DatabaseSource
from ._models_py3 import DatabricksComputeInfo
from ._models_py3 import DatabricksConfiguration
from ._models_py3 import DatacacheConfiguration
from ._models_py3 import DatasetIdentifier
from ._models_py3 import DatasetInputDetails
from ._models_py3 import DatasetLineage
from ._models_py3 import DatasetOutput
from ._models_py3 import DatasetOutputDetails
from ._models_py3 import DatasetOutputOptions
from ._models_py3 import DatasetRegistration
from ._models_py3 import DatasetRegistrationOptions
from ._models_py3 import DatastoreSetting
from ._models_py3 import DbfsStorageInfoDto
from ._models_py3 import DebugInfoResponse
from ._models_py3 import DeployFlowRequest
from ._models_py3 import DeploymentInfo
from ._models_py3 import DistributionConfiguration
from ._models_py3 import DistributionParameter
from ._models_py3 import DoWhileControlFlowInfo
from ._models_py3 import DoWhileControlFlowRunSettings
from ._models_py3 import DockerBuildContext
from ._models_py3 import DockerConfiguration
from ._models_py3 import DockerImagePlatform
from ._models_py3 import DockerSection
from ._models_py3 import DockerSettingConfiguration
from ._models_py3 import DownloadResourceInfo
from ._models_py3 import EPRPipelineRunErrorClassificationRequest
from ._models_py3 import EndpointSetting
from ._models_py3 import EntityInterface
from ._models_py3 import EntrySetting
from ._models_py3 import EnumParameterRule
from ._models_py3 import EnvironmentConfiguration
from ._models_py3 import EnvironmentDefinition
from ._models_py3 import EnvironmentDefinitionDto
from ._models_py3 import ErrorAdditionalInfo
from ._models_py3 import ErrorResponse
from ._models_py3 import EsCloudConfiguration
from ._models_py3 import EvaluationFlowRunSettings
from ._models_py3 import ExampleRequest
from ._models_py3 import ExecutionContextDto
from ._models_py3 import ExecutionDataLocation
from ._models_py3 import ExecutionDataPath
from ._models_py3 import ExecutionGlobsOptions
from ._models_py3 import ExperimentComputeMetaInfo
from ._models_py3 import ExperimentInfo
from ._models_py3 import ExportComponentMetaInfo
from ._models_py3 import ExportDataTask
from ._models_py3 import FeaturizationSettings
from ._models_py3 import FeedDto
from ._models_py3 import FeedDtoSupportedAssetTypes
from ._models_py3 import FileSystem
from ._models_py3 import Flow
from ._models_py3 import FlowAnnotations
from ._models_py3 import FlowBaseDto
from ._models_py3 import FlowDto
from ._models_py3 import FlowEnvironment
from ._models_py3 import FlowFeature
from ._models_py3 import FlowFeatureState
from ._models_py3 import FlowGraph
from ._models_py3 import FlowGraphAnnotationNode
from ._models_py3 import FlowGraphLayout
from ._models_py3 import FlowGraphReference
from ._models_py3 import FlowIndexEntity
from ._models_py3 import FlowInputDefinition
from ._models_py3 import FlowNode
from ._models_py3 import FlowNodeLayout
from ._models_py3 import FlowNodeVariant
from ._models_py3 import FlowOutputDefinition
from ._models_py3 import FlowProperties
from ._models_py3 import FlowRunBasePath
from ._models_py3 import FlowRunInfo
from ._models_py3 import FlowRunResult
from ._models_py3 import FlowRunSettings
from ._models_py3 import FlowRuntimeCapability
from ._models_py3 import FlowRuntimeDto
from ._models_py3 import FlowSampleDto
from ._models_py3 import FlowSessionDto
from ._models_py3 import FlowSnapshot
from ._models_py3 import FlowSubmitRunSettings
from ._models_py3 import FlowTestInfo
from ._models_py3 import FlowTestStorageSetting
from ._models_py3 import FlowToolSettingParameter
from ._models_py3 import FlowToolsDto
from ._models_py3 import FlowVariantNode
from ._models_py3 import ForecastHorizon
from ._models_py3 import ForecastingSettings
from ._models_py3 import GeneralSettings
from ._models_py3 import GeneratePipelineComponentRequest
from ._models_py3 import GenerateToolMetaRequest
from ._models_py3 import GetDynamicListRequest
from ._models_py3 import GetRunDataResultDto
from ._models_py3 import GetTrainingSessionDto
from ._models_py3 import GlobalJobDispatcherConfiguration
from ._models_py3 import GlobsOptions
from ._models_py3 import GraphAnnotationNode
from ._models_py3 import GraphControlNode
from ._models_py3 import GraphControlReferenceNode
from ._models_py3 import GraphDatasetNode
from ._models_py3 import GraphDraftEntity
from ._models_py3 import GraphEdge
from ._models_py3 import GraphLayout
from ._models_py3 import GraphLayoutCreationInfo
from ._models_py3 import GraphModuleNode
from ._models_py3 import GraphModuleNodeRunSetting
from ._models_py3 import GraphModuleNodeUIInputSetting
from ._models_py3 import GraphNodeStatusInfo
from ._models_py3 import GraphReferenceNode
from ._models_py3 import HdfsReference
from ._models_py3 import HdiClusterComputeInfo
from ._models_py3 import HdiConfiguration
from ._models_py3 import HdiRunConfiguration
from ._models_py3 import HistoryConfiguration
from ._models_py3 import HyperDriveConfiguration
from ._models_py3 import ICheckableLongRunningOperationResponse
from ._models_py3 import IdentityConfiguration
from ._models_py3 import IdentitySetting
from ._models_py3 import ImportDataTask
from ._models_py3 import IndexedErrorResponse
from ._models_py3 import InitScriptInfoDto
from ._models_py3 import InnerErrorDetails
from ._models_py3 import InnerErrorResponse
from ._models_py3 import InputAsset
from ._models_py3 import InputData
from ._models_py3 import InputDataBinding
from ._models_py3 import InputDefinition
from ._models_py3 import InputOutputPortMetadata
from ._models_py3 import InputSetting
from ._models_py3 import IntellectualPropertyPublisherInformation
from ._models_py3 import InteractiveConfig
from ._models_py3 import InteractiveConfiguration
from ._models_py3 import JobCost
from ._models_py3 import JobEndpoint
from ._models_py3 import JobInput
from ._models_py3 import JobOutput
from ._models_py3 import JobOutputArtifacts
from ._models_py3 import JobScheduleDto
from ._models_py3 import K8SConfiguration
from ._models_py3 import KeyValuePairComponentNameMetaInfoErrorResponse
from ._models_py3 import KeyValuePairComponentNameMetaInfoModuleDto
from ._models_py3 import KeyValuePairStringObject
from ._models_py3 import KubernetesConfiguration
from ._models_py3 import Kwarg
from ._models_py3 import LegacyDataPath
from ._models_py3 import LimitSettings
from ._models_py3 import LinkedADBWorkspaceMetadata
from ._models_py3 import LinkedPipelineInfo
from ._models_py3 import LoadFlowAsComponentRequest
from ._models_py3 import LogRunTerminatedEventDto
from ._models_py3 import LongRunningOperationUriResponse
from ._models_py3 import LongRunningUpdateRegistryComponentRequest
from ._models_py3 import ManagedServiceIdentity
from ._models_py3 import MavenLibraryDto
from ._models_py3 import MetricProperties
from ._models_py3 import MetricSchemaDto
from ._models_py3 import MetricSchemaPropertyDto
from ._models_py3 import MetricV2Dto
from ._models_py3 import MetricV2Value
from ._models_py3 import MfeInternalAutologgerSettings
from ._models_py3 import MfeInternalIdentityConfiguration
from ._models_py3 import MfeInternalNodes
from ._models_py3 import MfeInternalOutputData
from ._models_py3 import MfeInternalSecretConfiguration
from ._models_py3 import MfeInternalUriReference
from ._models_py3 import MfeInternalV20211001ComponentJob
from ._models_py3 import MinMaxParameterRule
from ._models_py3 import MlcComputeInfo
from ._models_py3 import ModelDto
from ._models_py3 import ModelManagementErrorResponse
from ._models_py3 import ModifyPipelineJobScheduleDto
from ._models_py3 import ModuleDto
from ._models_py3 import ModuleDtoWithErrors
from ._models_py3 import ModuleDtoWithValidateStatus
from ._models_py3 import ModuleEntity
from ._models_py3 import ModulePythonInterface
from ._models_py3 import MpiConfiguration
from ._models_py3 import NCrossValidations
from ._models_py3 import Node
from ._models_py3 import NodeInputPort
from ._models_py3 import NodeLayout
from ._models_py3 import NodeOutputPort
from ._models_py3 import NodePortInterface
from ._models_py3 import NodeSource
from ._models_py3 import NodeTelemetryMetaInfo
from ._models_py3 import NodeVariant
from ._models_py3 import Nodes
from ._models_py3 import NoteBookTaskDto
from ._models_py3 import NotificationSetting
from ._models_py3 import ODataError
from ._models_py3 import ODataErrorDetail
from ._models_py3 import ODataErrorResponse
from ._models_py3 import ODataInnerError
from ._models_py3 import OutputData
from ._models_py3 import OutputDataBinding
from ._models_py3 import OutputDatasetLineage
from ._models_py3 import OutputDefinition
from ._models_py3 import OutputOptions
from ._models_py3 import OutputSetting
from ._models_py3 import OutputSettingSpec
from ._models_py3 import PaginatedDataInfoList
from ._models_py3 import PaginatedModelDtoList
from ._models_py3 import PaginatedModuleDtoList
from ._models_py3 import PaginatedPipelineDraftSummaryList
from ._models_py3 import PaginatedPipelineEndpointSummaryList
from ._models_py3 import PaginatedPipelineRunSummaryList
from ._models_py3 import PaginatedPublishedPipelineSummaryList
from ._models_py3 import ParallelForControlFlowInfo
from ._models_py3 import ParallelTaskConfiguration
from ._models_py3 import Parameter
from ._models_py3 import ParameterAssignment
from ._models_py3 import ParameterDefinition
from ._models_py3 import PatchFlowRequest
from ._models_py3 import Pipeline
from ._models_py3 import PipelineDraft
from ._models_py3 import PipelineDraftStepDetails
from ._models_py3 import PipelineDraftSummary
from ._models_py3 import PipelineEndpoint
from ._models_py3 import PipelineEndpointSummary
from ._models_py3 import PipelineGraph
from ._models_py3 import PipelineInput
from ._models_py3 import PipelineJob
from ._models_py3 import PipelineJobRuntimeBasicSettings
from ._models_py3 import PipelineJobScheduleDto
from ._models_py3 import PipelineOutput
from ._models_py3 import PipelineRun
from ._models_py3 import PipelineRunGraphDetail
from ._models_py3 import PipelineRunGraphStatus
from ._models_py3 import PipelineRunProfile
from ._models_py3 import PipelineRunStatus
from ._models_py3 import PipelineRunStepDetails
from ._models_py3 import PipelineRunSummary
from ._models_py3 import PipelineStatus
from ._models_py3 import PipelineStepRun
from ._models_py3 import PipelineStepRunOutputs
from ._models_py3 import PipelineSubDraft
from ._models_py3 import PolicyValidationResponse
from ._models_py3 import PortInfo
from ._models_py3 import PortOutputInfo
from ._models_py3 import PriorityConfig
from ._models_py3 import PriorityConfiguration
from ._models_py3 import PromoteDataSetRequest
from ._models_py3 import ProviderEntity
from ._models_py3 import PublishedPipeline
from ._models_py3 import PublishedPipelineSummary
from ._models_py3 import PyTorchConfiguration
from ._models_py3 import PythonInterfaceMapping
from ._models_py3 import PythonPyPiOrRCranLibraryDto
from ._models_py3 import PythonSection
from ._models_py3 import QueueingInfo
from ._models_py3 import RCranPackage
from ._models_py3 import RGitHubPackage
from ._models_py3 import RSection
from ._models_py3 import RawComponentDto
from ._models_py3 import RayConfiguration
from ._models_py3 import RealTimeEndpoint
from ._models_py3 import RealTimeEndpointInfo
from ._models_py3 import RealTimeEndpointStatus
from ._models_py3 import RealTimeEndpointSummary
from ._models_py3 import RealTimeEndpointTestRequest
from ._models_py3 import Recurrence
from ._models_py3 import RecurrencePattern
from ._models_py3 import RecurrenceSchedule
from ._models_py3 import RegenerateServiceKeysRequest
from ._models_py3 import RegisterComponentMetaInfo
from ._models_py3 import RegisterComponentMetaInfoExtraHashes
from ._models_py3 import RegisterComponentMetaInfoIdentifierHashes
from ._models_py3 import RegisterRegistryComponentMetaInfo
from ._models_py3 import RegisterRegistryComponentMetaInfoExtraHashes
from ._models_py3 import RegisterRegistryComponentMetaInfoIdentifierHashes
from ._models_py3 import RegisteredDataSetReference
from ._models_py3 import RegistrationOptions
from ._models_py3 import RegistryBlobReferenceData
from ._models_py3 import RegistryIdentity
from ._models_py3 import Relationship
from ._models_py3 import RemoteDockerComputeInfo
from ._models_py3 import ResourceConfig
from ._models_py3 import ResourceConfiguration
from ._models_py3 import ResourcesSetting
from ._models_py3 import RetrieveToolFuncResultRequest
from ._models_py3 import RetryConfiguration
from ._models_py3 import RootError
from ._models_py3 import RunAnnotations
from ._models_py3 import RunConfiguration
from ._models_py3 import RunDatasetReference
from ._models_py3 import RunDefinition
from ._models_py3 import RunDetailsDto
from ._models_py3 import RunDetailsWarningDto
from ._models_py3 import RunDto
from ._models_py3 import RunIndexEntity
from ._models_py3 import RunIndexMetricSummary
from ._models_py3 import RunIndexMetricSummarySystemObject
from ._models_py3 import RunIndexResourceMetricSummary
from ._models_py3 import RunMetricDto
from ._models_py3 import RunMetricsTypesDto
from ._models_py3 import RunProperties
from ._models_py3 import RunSettingParameter
from ._models_py3 import RunSettingParameterAssignment
from ._models_py3 import RunSettingUIParameterHint
from ._models_py3 import RunStatusPeriod
from ._models_py3 import RunTypeV2
from ._models_py3 import RunTypeV2Index
from ._models_py3 import RuntimeConfiguration
from ._models_py3 import SampleMeta
from ._models_py3 import SavePipelineDraftRequest
from ._models_py3 import SavedDataSetReference
from ._models_py3 import ScheduleBase
from ._models_py3 import SchemaContractsCreatedBy
from ._models_py3 import ScopeCloudConfiguration
from ._models_py3 import Seasonality
from ._models_py3 import SecretConfiguration
from ._models_py3 import SegmentedResult1
from ._models_py3 import ServiceLogRequest
from ._models_py3 import SessionApplication
from ._models_py3 import SessionApplicationRunCommandResult
from ._models_py3 import SessionProperties
from ._models_py3 import SetupFlowSessionRequest
from ._models_py3 import SharingScope
from ._models_py3 import Snapshot
from ._models_py3 import SnapshotInfo
from ._models_py3 import SourceCodeDataReference
from ._models_py3 import SparkConfiguration
from ._models_py3 import SparkJarTaskDto
from ._models_py3 import SparkJob
from ._models_py3 import SparkJobEntry
from ._models_py3 import SparkMavenPackage
from ._models_py3 import SparkPythonTaskDto
from ._models_py3 import SparkResourceConfiguration
from ._models_py3 import SparkSection
from ._models_py3 import SparkSubmitTaskDto
from ._models_py3 import SqlDataPath
from ._models_py3 import StackEnsembleSettings
from ._models_py3 import StandbyPoolProperties
from ._models_py3 import StandbyPoolResourceStatus
from ._models_py3 import StartRunResult
from ._models_py3 import StepRunProfile
from ._models_py3 import StorageInfo
from ._models_py3 import StoredProcedureParameter
from ._models_py3 import Stream
from ._models_py3 import StructuredInterface
from ._models_py3 import StructuredInterfaceInput
from ._models_py3 import StructuredInterfaceOutput
from ._models_py3 import StructuredInterfaceParameter
from ._models_py3 import StudioMigrationInfo
from ._models_py3 import SubGraphConcatenateAssignment
from ._models_py3 import SubGraphConfiguration
from ._models_py3 import SubGraphConnectionInfo
from ._models_py3 import SubGraphDataPathParameterAssignment
from ._models_py3 import SubGraphInfo
from ._models_py3 import SubGraphParameterAssignment
from ._models_py3 import SubGraphPortInfo
from ._models_py3 import SubPipelineDefinition
from ._models_py3 import SubPipelineParameterAssignment
from ._models_py3 import SubPipelinesInfo
from ._models_py3 import SubStatusPeriod
from ._models_py3 import SubmitBulkRunRequest
from ._models_py3 import SubmitBulkRunResponse
from ._models_py3 import SubmitFlowRequest
from ._models_py3 import SubmitPipelineRunRequest
from ._models_py3 import SweepEarlyTerminationPolicy
from ._models_py3 import SweepSettings
from ._models_py3 import SweepSettingsLimits
from ._models_py3 import SystemData
from ._models_py3 import SystemMeta
from ._models_py3 import SystemMetaExtraHashes
from ._models_py3 import SystemMetaIdentifierHashes
from ._models_py3 import TargetLags
from ._models_py3 import TargetRollingWindowSize
from ._models_py3 import TargetSelectorConfiguration
from ._models_py3 import Task
from ._models_py3 import TaskControlFlowInfo
from ._models_py3 import TaskReuseInfo
from ._models_py3 import TensorflowConfiguration
from ._models_py3 import TestDataSettings
from ._models_py3 import Tool
from ._models_py3 import ToolFuncResponse
from ._models_py3 import ToolInputDynamicList
from ._models_py3 import ToolInputGeneratedBy
from ._models_py3 import ToolMetaDto
from ._models_py3 import ToolSetting
from ._models_py3 import ToolSourceMeta
from ._models_py3 import TorchDistributedConfiguration
from ._models_py3 import TrainingDiagnosticConfiguration
from ._models_py3 import TrainingOutput
from ._models_py3 import TrainingSettings
from ._models_py3 import TriggerAsyncOperationStatus
from ._models_py3 import TuningNodeSetting
from ._models_py3 import TypedAssetReference
from ._models_py3 import UIAzureOpenAIDeploymentNameSelector
from ._models_py3 import UIAzureOpenAIModelCapabilities
from ._models_py3 import UIColumnPicker
from ._models_py3 import UIComputeSelection
from ._models_py3 import UIHyperparameterConfiguration
from ._models_py3 import UIInputSetting
from ._models_py3 import UIJsonEditor
from ._models_py3 import UIParameterHint
from ._models_py3 import UIPromptFlowConnectionSelector
from ._models_py3 import UIWidgetMetaInfo
from ._models_py3 import UIYamlEditor
from ._models_py3 import UnversionedEntityRequestDto
from ._models_py3 import UnversionedEntityResponseDto
from ._models_py3 import UnversionedRebuildIndexDto
from ._models_py3 import UnversionedRebuildResponseDto
from ._models_py3 import UpdateComponentRequest
from ._models_py3 import UpdateFlowRequest
from ._models_py3 import UpdateFlowRuntimeRequest
from ._models_py3 import UpdateRegistryComponentRequest
from ._models_py3 import UploadOptions
from ._models_py3 import UriReference
from ._models_py3 import User
from ._models_py3 import UserAssignedIdentity
from ._models_py3 import ValidationDataSettings
from ._models_py3 import VariantNode
from ._models_py3 import WebServiceComputeMetaInfo
from ._models_py3 import WebServicePort
from ._models_py3 import Webhook
from ._models_py3 import WorkspaceConnectionSpec
except (SyntaxError, ImportError):
from ._models import ACIAdvanceSettings # type: ignore
from ._models import AEVAComputeConfiguration # type: ignore
from ._models import AEVAResourceConfiguration # type: ignore
from ._models import AISuperComputerConfiguration # type: ignore
from ._models import AISuperComputerScalePolicy # type: ignore
from ._models import AISuperComputerStorageReferenceConfiguration # type: ignore
from ._models import AKSAdvanceSettings # type: ignore
from ._models import AKSReplicaStatus # type: ignore
from ._models import AMLComputeConfiguration # type: ignore
from ._models import APCloudConfiguration # type: ignore
from ._models import Activate # type: ignore
from ._models import AdditionalErrorInfo # type: ignore
from ._models import AdhocTriggerScheduledCommandJobRequest # type: ignore
from ._models import AdhocTriggerScheduledSparkJobRequest # type: ignore
from ._models import AetherAPCloudConfiguration # type: ignore
from ._models import AetherAmlDataset # type: ignore
from ._models import AetherAmlSparkCloudSetting # type: ignore
from ._models import AetherArgumentAssignment # type: ignore
from ._models import AetherAssetDefinition # type: ignore
from ._models import AetherAssetOutputSettings # type: ignore
from ._models import AetherAutoFeaturizeConfiguration # type: ignore
from ._models import AetherAutoMLComponentConfiguration # type: ignore
from ._models import AetherAutoTrainConfiguration # type: ignore
from ._models import AetherAzureBlobReference # type: ignore
from ._models import AetherAzureDataLakeGen2Reference # type: ignore
from ._models import AetherAzureDataLakeReference # type: ignore
from ._models import AetherAzureDatabaseReference # type: ignore
from ._models import AetherAzureFilesReference # type: ignore
from ._models import AetherBatchAiComputeInfo # type: ignore
from ._models import AetherBuildArtifactInfo # type: ignore
from ._models import AetherCloudBuildDropPathInfo # type: ignore
from ._models import AetherCloudBuildInfo # type: ignore
from ._models import AetherCloudBuildQueueInfo # type: ignore
from ._models import AetherCloudPrioritySetting # type: ignore
from ._models import AetherCloudSettings # type: ignore
from ._models import AetherColumnTransformer # type: ignore
from ._models import AetherComputeConfiguration # type: ignore
from ._models import AetherComputeSetting # type: ignore
from ._models import AetherControlInput # type: ignore
from ._models import AetherControlOutput # type: ignore
from ._models import AetherCopyDataTask # type: ignore
from ._models import AetherCosmosReference # type: ignore
from ._models import AetherCreatedBy # type: ignore
from ._models import AetherCustomReference # type: ignore
from ._models import AetherDBFSReference # type: ignore
from ._models import AetherDataLocation # type: ignore
from ._models import AetherDataLocationReuseCalculationFields # type: ignore
from ._models import AetherDataPath # type: ignore
from ._models import AetherDataReference # type: ignore
from ._models import AetherDataSetDefinition # type: ignore
from ._models import AetherDataSetDefinitionValue # type: ignore
from ._models import AetherDataSettings # type: ignore
from ._models import AetherDataTransferCloudConfiguration # type: ignore
from ._models import AetherDataTransferSink # type: ignore
from ._models import AetherDataTransferSource # type: ignore
from ._models import AetherDataTransferV2CloudSetting # type: ignore
from ._models import AetherDatabaseSink # type: ignore
from ._models import AetherDatabaseSource # type: ignore
from ._models import AetherDatabricksComputeInfo # type: ignore
from ._models import AetherDatasetOutput # type: ignore
from ._models import AetherDatasetOutputOptions # type: ignore
from ._models import AetherDatasetRegistration # type: ignore
from ._models import AetherDatastoreSetting # type: ignore
from ._models import AetherDoWhileControlFlowInfo # type: ignore
from ._models import AetherDoWhileControlFlowRunSettings # type: ignore
from ._models import AetherDockerSettingConfiguration # type: ignore
from ._models import AetherEntityInterfaceDocumentation # type: ignore
from ._models import AetherEntrySetting # type: ignore
from ._models import AetherEnvironmentConfiguration # type: ignore
from ._models import AetherEsCloudConfiguration # type: ignore
from ._models import AetherExportDataTask # type: ignore
from ._models import AetherFeaturizationSettings # type: ignore
from ._models import AetherFileSystem # type: ignore
from ._models import AetherForecastHorizon # type: ignore
from ._models import AetherForecastingSettings # type: ignore
from ._models import AetherGeneralSettings # type: ignore
from ._models import AetherGlobsOptions # type: ignore
from ._models import AetherGraphControlNode # type: ignore
from ._models import AetherGraphControlReferenceNode # type: ignore
from ._models import AetherGraphDatasetNode # type: ignore
from ._models import AetherGraphEdge # type: ignore
from ._models import AetherGraphEntity # type: ignore
from ._models import AetherGraphModuleNode # type: ignore
from ._models import AetherGraphReferenceNode # type: ignore
from ._models import AetherHdfsReference # type: ignore
from ._models import AetherHdiClusterComputeInfo # type: ignore
from ._models import AetherHdiRunConfiguration # type: ignore
from ._models import AetherHyperDriveConfiguration # type: ignore
from ._models import AetherIdentitySetting # type: ignore
from ._models import AetherImportDataTask # type: ignore
from ._models import AetherInputSetting # type: ignore
from ._models import AetherInteractiveConfig # type: ignore
from ._models import AetherK8SConfiguration # type: ignore
from ._models import AetherLegacyDataPath # type: ignore
from ._models import AetherLimitSettings # type: ignore
from ._models import AetherMlcComputeInfo # type: ignore
from ._models import AetherModuleEntity # type: ignore
from ._models import AetherModuleExtendedProperties # type: ignore
from ._models import AetherNCrossValidations # type: ignore
from ._models import AetherOutputSetting # type: ignore
from ._models import AetherParallelForControlFlowInfo # type: ignore
from ._models import AetherParameterAssignment # type: ignore
from ._models import AetherPhillyHdfsReference # type: ignore
from ._models import AetherPortInfo # type: ignore
from ._models import AetherPriorityConfig # type: ignore
from ._models import AetherPriorityConfiguration # type: ignore
from ._models import AetherRegisteredDataSetReference # type: ignore
from ._models import AetherRemoteDockerComputeInfo # type: ignore
from ._models import AetherResourceAssignment # type: ignore
from ._models import AetherResourceAttributeAssignment # type: ignore
from ._models import AetherResourceAttributeDefinition # type: ignore
from ._models import AetherResourceConfig # type: ignore
from ._models import AetherResourceConfiguration # type: ignore
from ._models import AetherResourceModel # type: ignore
from ._models import AetherResourcesSetting # type: ignore
from ._models import AetherSavedDataSetReference # type: ignore
from ._models import AetherScopeCloudConfiguration # type: ignore
from ._models import AetherSeasonality # type: ignore
from ._models import AetherSqlDataPath # type: ignore
from ._models import AetherStackEnsembleSettings # type: ignore
from ._models import AetherStoredProcedureParameter # type: ignore
from ._models import AetherStructuredInterface # type: ignore
from ._models import AetherStructuredInterfaceInput # type: ignore
from ._models import AetherStructuredInterfaceOutput # type: ignore
from ._models import AetherStructuredInterfaceParameter # type: ignore
from ._models import AetherSubGraphConfiguration # type: ignore
from ._models import AetherSweepEarlyTerminationPolicy # type: ignore
from ._models import AetherSweepSettings # type: ignore
from ._models import AetherSweepSettingsLimits # type: ignore
from ._models import AetherTargetLags # type: ignore
from ._models import AetherTargetRollingWindowSize # type: ignore
from ._models import AetherTargetSelectorConfiguration # type: ignore
from ._models import AetherTestDataSettings # type: ignore
from ._models import AetherTorchDistributedConfiguration # type: ignore
from ._models import AetherTrainingOutput # type: ignore
from ._models import AetherTrainingSettings # type: ignore
from ._models import AetherUIAzureOpenAIDeploymentNameSelector # type: ignore
from ._models import AetherUIAzureOpenAIModelCapabilities # type: ignore
from ._models import AetherUIColumnPicker # type: ignore
from ._models import AetherUIJsonEditor # type: ignore
from ._models import AetherUIParameterHint # type: ignore
from ._models import AetherUIPromptFlowConnectionSelector # type: ignore
from ._models import AetherValidationDataSettings # type: ignore
from ._models import AetherVsoBuildArtifactInfo # type: ignore
from ._models import AetherVsoBuildDefinitionInfo # type: ignore
from ._models import AetherVsoBuildInfo # type: ignore
from ._models import AmlDataset # type: ignore
from ._models import AmlK8SConfiguration # type: ignore
from ._models import AmlK8SPriorityConfiguration # type: ignore
from ._models import AmlSparkCloudSetting # type: ignore
from ._models import ApiAndParameters # type: ignore
from ._models import ApplicationEndpointConfiguration # type: ignore
from ._models import ArgumentAssignment # type: ignore
from ._models import Asset # type: ignore
from ._models import AssetDefinition # type: ignore
from ._models import AssetNameAndVersionIdentifier # type: ignore
from ._models import AssetOutputSettings # type: ignore
from ._models import AssetOutputSettingsParameter # type: ignore
from ._models import AssetPublishResult # type: ignore
from ._models import AssetPublishSingleRegionResult # type: ignore
from ._models import AssetTypeMetaInfo # type: ignore
from ._models import AssetVersionPublishRequest # type: ignore
from ._models import AssignedUser # type: ignore
from ._models import AuthKeys # type: ignore
from ._models import AutoClusterComputeSpecification # type: ignore
from ._models import AutoDeleteSetting # type: ignore
from ._models import AutoFeaturizeConfiguration # type: ignore
from ._models import AutoMLComponentConfiguration # type: ignore
from ._models import AutoScaler # type: ignore
from ._models import AutoTrainConfiguration # type: ignore
from ._models import AutologgerSettings # type: ignore
from ._models import AvailabilityResponse # type: ignore
from ._models import AzureBlobReference # type: ignore
from ._models import AzureDataLakeGen2Reference # type: ignore
from ._models import AzureDataLakeReference # type: ignore
from ._models import AzureDatabaseReference # type: ignore
from ._models import AzureFilesReference # type: ignore
from ._models import AzureMLModuleVersionDescriptor # type: ignore
from ._models import AzureOpenAIDeploymentDto # type: ignore
from ._models import AzureOpenAIModelCapabilities # type: ignore
from ._models import BatchAiComputeInfo # type: ignore
from ._models import BatchDataInput # type: ignore
from ._models import BatchExportComponentSpecResponse # type: ignore
from ._models import BatchExportRawComponentResponse # type: ignore
from ._models import BatchGetComponentHashesRequest # type: ignore
from ._models import BatchGetComponentRequest # type: ignore
from ._models import Binding # type: ignore
from ._models import BulkTestDto # type: ignore
from ._models import CloudError # type: ignore
from ._models import CloudPrioritySetting # type: ignore
from ._models import CloudSettings # type: ignore
from ._models import ColumnTransformer # type: ignore
from ._models import CommandJob # type: ignore
from ._models import CommandJobLimits # type: ignore
from ._models import CommandReturnCodeConfig # type: ignore
from ._models import ComponentConfiguration # type: ignore
from ._models import ComponentInput # type: ignore
from ._models import ComponentJob # type: ignore
from ._models import ComponentJobInput # type: ignore
from ._models import ComponentJobOutput # type: ignore
from ._models import ComponentNameAndDefaultVersion # type: ignore
from ._models import ComponentNameMetaInfo # type: ignore
from ._models import ComponentOutput # type: ignore
from ._models import ComponentPreflightResult # type: ignore
from ._models import ComponentSpecMetaInfo # type: ignore
from ._models import ComponentUpdateRequest # type: ignore
from ._models import ComponentValidationRequest # type: ignore
from ._models import ComponentValidationResponse # type: ignore
from ._models import Compute # type: ignore
from ._models import ComputeConfiguration # type: ignore
from ._models import ComputeContract # type: ignore
from ._models import ComputeIdentityContract # type: ignore
from ._models import ComputeIdentityDto # type: ignore
from ._models import ComputeInfo # type: ignore
from ._models import ComputeProperties # type: ignore
from ._models import ComputeRPUserAssignedIdentity # type: ignore
from ._models import ComputeRequest # type: ignore
from ._models import ComputeSetting # type: ignore
from ._models import ComputeStatus # type: ignore
from ._models import ComputeStatusDetail # type: ignore
from ._models import ComputeWarning # type: ignore
from ._models import ConnectionConfigSpec # type: ignore
from ._models import ConnectionDto # type: ignore
from ._models import ConnectionEntity # type: ignore
from ._models import ConnectionOverrideSetting # type: ignore
from ._models import ConnectionSpec # type: ignore
from ._models import ContainerInstanceConfiguration # type: ignore
from ._models import ContainerRegistry # type: ignore
from ._models import ContainerResourceRequirements # type: ignore
from ._models import ControlInput # type: ignore
from ._models import ControlOutput # type: ignore
from ._models import CopyDataTask # type: ignore
from ._models import CreateFlowFromSampleRequest # type: ignore
from ._models import CreateFlowRequest # type: ignore
from ._models import CreateFlowRuntimeRequest # type: ignore
from ._models import CreateFlowSessionRequest # type: ignore
from ._models import CreateInferencePipelineRequest # type: ignore
from ._models import CreateOrUpdateConnectionRequest # type: ignore
from ._models import CreateOrUpdateConnectionRequestDto # type: ignore
from ._models import CreatePipelineDraftRequest # type: ignore
from ._models import CreatePipelineJobScheduleDto # type: ignore
from ._models import CreatePublishedPipelineRequest # type: ignore
from ._models import CreateRealTimeEndpointRequest # type: ignore
from ._models import CreatedBy # type: ignore
from ._models import CreatedFromDto # type: ignore
from ._models import CreationContext # type: ignore
from ._models import Cron # type: ignore
from ._models import CustomConnectionConfig # type: ignore
from ._models import CustomReference # type: ignore
from ._models import DBFSReference # type: ignore
from ._models import Data # type: ignore
from ._models import DataInfo # type: ignore
from ._models import DataLocation # type: ignore
from ._models import DataPath # type: ignore
from ._models import DataPathParameter # type: ignore
from ._models import DataPortDto # type: ignore
from ._models import DataReference # type: ignore
from ._models import DataReferenceConfiguration # type: ignore
from ._models import DataSetDefinition # type: ignore
from ._models import DataSetDefinitionValue # type: ignore
from ._models import DataSetPathParameter # type: ignore
from ._models import DataSettings # type: ignore
from ._models import DataTransferCloudConfiguration # type: ignore
from ._models import DataTransferSink # type: ignore
from ._models import DataTransferSource # type: ignore
from ._models import DataTransferV2CloudSetting # type: ignore
from ._models import DataTypeCreationInfo # type: ignore
from ._models import DatabaseSink # type: ignore
from ._models import DatabaseSource # type: ignore
from ._models import DatabricksComputeInfo # type: ignore
from ._models import DatabricksConfiguration # type: ignore
from ._models import DatacacheConfiguration # type: ignore
from ._models import DatasetIdentifier # type: ignore
from ._models import DatasetInputDetails # type: ignore
from ._models import DatasetLineage # type: ignore
from ._models import DatasetOutput # type: ignore
from ._models import DatasetOutputDetails # type: ignore
from ._models import DatasetOutputOptions # type: ignore
from ._models import DatasetRegistration # type: ignore
from ._models import DatasetRegistrationOptions # type: ignore
from ._models import DatastoreSetting # type: ignore
from ._models import DbfsStorageInfoDto # type: ignore
from ._models import DebugInfoResponse # type: ignore
from ._models import DeployFlowRequest # type: ignore
from ._models import DeploymentInfo # type: ignore
from ._models import DistributionConfiguration # type: ignore
from ._models import DistributionParameter # type: ignore
from ._models import DoWhileControlFlowInfo # type: ignore
from ._models import DoWhileControlFlowRunSettings # type: ignore
from ._models import DockerBuildContext # type: ignore
from ._models import DockerConfiguration # type: ignore
from ._models import DockerImagePlatform # type: ignore
from ._models import DockerSection # type: ignore
from ._models import DockerSettingConfiguration # type: ignore
from ._models import DownloadResourceInfo # type: ignore
from ._models import EPRPipelineRunErrorClassificationRequest # type: ignore
from ._models import EndpointSetting # type: ignore
from ._models import EntityInterface # type: ignore
from ._models import EntrySetting # type: ignore
from ._models import EnumParameterRule # type: ignore
from ._models import EnvironmentConfiguration # type: ignore
from ._models import EnvironmentDefinition # type: ignore
from ._models import EnvironmentDefinitionDto # type: ignore
from ._models import ErrorAdditionalInfo # type: ignore
from ._models import ErrorResponse # type: ignore
from ._models import EsCloudConfiguration # type: ignore
from ._models import EvaluationFlowRunSettings # type: ignore
from ._models import ExampleRequest # type: ignore
from ._models import ExecutionContextDto # type: ignore
from ._models import ExecutionDataLocation # type: ignore
from ._models import ExecutionDataPath # type: ignore
from ._models import ExecutionGlobsOptions # type: ignore
from ._models import ExperimentComputeMetaInfo # type: ignore
from ._models import ExperimentInfo # type: ignore
from ._models import ExportComponentMetaInfo # type: ignore
from ._models import ExportDataTask # type: ignore
from ._models import FeaturizationSettings # type: ignore
from ._models import FeedDto # type: ignore
from ._models import FeedDtoSupportedAssetTypes # type: ignore
from ._models import FileSystem # type: ignore
from ._models import Flow # type: ignore
from ._models import FlowAnnotations # type: ignore
from ._models import FlowBaseDto # type: ignore
from ._models import FlowDto # type: ignore
from ._models import FlowEnvironment # type: ignore
from ._models import FlowFeature # type: ignore
from ._models import FlowFeatureState # type: ignore
from ._models import FlowGraph # type: ignore
from ._models import FlowGraphAnnotationNode # type: ignore
from ._models import FlowGraphLayout # type: ignore
from ._models import FlowGraphReference # type: ignore
from ._models import FlowIndexEntity # type: ignore
from ._models import FlowInputDefinition # type: ignore
from ._models import FlowNode # type: ignore
from ._models import FlowNodeLayout # type: ignore
from ._models import FlowNodeVariant # type: ignore
from ._models import FlowOutputDefinition # type: ignore
from ._models import FlowProperties # type: ignore
from ._models import FlowRunBasePath # type: ignore
from ._models import FlowRunInfo # type: ignore
from ._models import FlowRunResult # type: ignore
from ._models import FlowRunSettings # type: ignore
from ._models import FlowRuntimeCapability # type: ignore
from ._models import FlowRuntimeDto # type: ignore
from ._models import FlowSampleDto # type: ignore
from ._models import FlowSessionDto # type: ignore
from ._models import FlowSnapshot # type: ignore
from ._models import FlowSubmitRunSettings # type: ignore
from ._models import FlowTestInfo # type: ignore
from ._models import FlowTestStorageSetting # type: ignore
from ._models import FlowToolSettingParameter # type: ignore
from ._models import FlowToolsDto # type: ignore
from ._models import FlowVariantNode # type: ignore
from ._models import ForecastHorizon # type: ignore
from ._models import ForecastingSettings # type: ignore
from ._models import GeneralSettings # type: ignore
from ._models import GeneratePipelineComponentRequest # type: ignore
from ._models import GenerateToolMetaRequest # type: ignore
from ._models import GetDynamicListRequest # type: ignore
from ._models import GetRunDataResultDto # type: ignore
from ._models import GetTrainingSessionDto # type: ignore
from ._models import GlobalJobDispatcherConfiguration # type: ignore
from ._models import GlobsOptions # type: ignore
from ._models import GraphAnnotationNode # type: ignore
from ._models import GraphControlNode # type: ignore
from ._models import GraphControlReferenceNode # type: ignore
from ._models import GraphDatasetNode # type: ignore
from ._models import GraphDraftEntity # type: ignore
from ._models import GraphEdge # type: ignore
from ._models import GraphLayout # type: ignore
from ._models import GraphLayoutCreationInfo # type: ignore
from ._models import GraphModuleNode # type: ignore
from ._models import GraphModuleNodeRunSetting # type: ignore
from ._models import GraphModuleNodeUIInputSetting # type: ignore
from ._models import GraphNodeStatusInfo # type: ignore
from ._models import GraphReferenceNode # type: ignore
from ._models import HdfsReference # type: ignore
from ._models import HdiClusterComputeInfo # type: ignore
from ._models import HdiConfiguration # type: ignore
from ._models import HdiRunConfiguration # type: ignore
from ._models import HistoryConfiguration # type: ignore
from ._models import HyperDriveConfiguration # type: ignore
from ._models import ICheckableLongRunningOperationResponse # type: ignore
from ._models import IdentityConfiguration # type: ignore
from ._models import IdentitySetting # type: ignore
from ._models import ImportDataTask # type: ignore
from ._models import IndexedErrorResponse # type: ignore
from ._models import InitScriptInfoDto # type: ignore
from ._models import InnerErrorDetails # type: ignore
from ._models import InnerErrorResponse # type: ignore
from ._models import InputAsset # type: ignore
from ._models import InputData # type: ignore
from ._models import InputDataBinding # type: ignore
from ._models import InputDefinition # type: ignore
from ._models import InputOutputPortMetadata # type: ignore
from ._models import InputSetting # type: ignore
from ._models import IntellectualPropertyPublisherInformation # type: ignore
from ._models import InteractiveConfig # type: ignore
from ._models import InteractiveConfiguration # type: ignore
from ._models import JobCost # type: ignore
from ._models import JobEndpoint # type: ignore
from ._models import JobInput # type: ignore
from ._models import JobOutput # type: ignore
from ._models import JobOutputArtifacts # type: ignore
from ._models import JobScheduleDto # type: ignore
from ._models import K8SConfiguration # type: ignore
from ._models import KeyValuePairComponentNameMetaInfoErrorResponse # type: ignore
from ._models import KeyValuePairComponentNameMetaInfoModuleDto # type: ignore
from ._models import KeyValuePairStringObject # type: ignore
from ._models import KubernetesConfiguration # type: ignore
from ._models import Kwarg # type: ignore
from ._models import LegacyDataPath # type: ignore
from ._models import LimitSettings # type: ignore
from ._models import LinkedADBWorkspaceMetadata # type: ignore
from ._models import LinkedPipelineInfo # type: ignore
from ._models import LoadFlowAsComponentRequest # type: ignore
from ._models import LogRunTerminatedEventDto # type: ignore
from ._models import LongRunningOperationUriResponse # type: ignore
from ._models import LongRunningUpdateRegistryComponentRequest # type: ignore
from ._models import ManagedServiceIdentity # type: ignore
from ._models import MavenLibraryDto # type: ignore
from ._models import MetricProperties # type: ignore
from ._models import MetricSchemaDto # type: ignore
from ._models import MetricSchemaPropertyDto # type: ignore
from ._models import MetricV2Dto # type: ignore
from ._models import MetricV2Value # type: ignore
from ._models import MfeInternalAutologgerSettings # type: ignore
from ._models import MfeInternalIdentityConfiguration # type: ignore
from ._models import MfeInternalNodes # type: ignore
from ._models import MfeInternalOutputData # type: ignore
from ._models import MfeInternalSecretConfiguration # type: ignore
from ._models import MfeInternalUriReference # type: ignore
from ._models import MfeInternalV20211001ComponentJob # type: ignore
from ._models import MinMaxParameterRule # type: ignore
from ._models import MlcComputeInfo # type: ignore
from ._models import ModelDto # type: ignore
from ._models import ModelManagementErrorResponse # type: ignore
from ._models import ModifyPipelineJobScheduleDto # type: ignore
from ._models import ModuleDto # type: ignore
from ._models import ModuleDtoWithErrors # type: ignore
from ._models import ModuleDtoWithValidateStatus # type: ignore
from ._models import ModuleEntity # type: ignore
from ._models import ModulePythonInterface # type: ignore
from ._models import MpiConfiguration # type: ignore
from ._models import NCrossValidations # type: ignore
from ._models import Node # type: ignore
from ._models import NodeInputPort # type: ignore
from ._models import NodeLayout # type: ignore
from ._models import NodeOutputPort # type: ignore
from ._models import NodePortInterface # type: ignore
from ._models import NodeSource # type: ignore
from ._models import NodeTelemetryMetaInfo # type: ignore
from ._models import NodeVariant # type: ignore
from ._models import Nodes # type: ignore
from ._models import NoteBookTaskDto # type: ignore
from ._models import NotificationSetting # type: ignore
from ._models import ODataError # type: ignore
from ._models import ODataErrorDetail # type: ignore
from ._models import ODataErrorResponse # type: ignore
from ._models import ODataInnerError # type: ignore
from ._models import OutputData # type: ignore
from ._models import OutputDataBinding # type: ignore
from ._models import OutputDatasetLineage # type: ignore
from ._models import OutputDefinition # type: ignore
from ._models import OutputOptions # type: ignore
from ._models import OutputSetting # type: ignore
from ._models import OutputSettingSpec # type: ignore
from ._models import PaginatedDataInfoList # type: ignore
from ._models import PaginatedModelDtoList # type: ignore
from ._models import PaginatedModuleDtoList # type: ignore
from ._models import PaginatedPipelineDraftSummaryList # type: ignore
from ._models import PaginatedPipelineEndpointSummaryList # type: ignore
from ._models import PaginatedPipelineRunSummaryList # type: ignore
from ._models import PaginatedPublishedPipelineSummaryList # type: ignore
from ._models import ParallelForControlFlowInfo # type: ignore
from ._models import ParallelTaskConfiguration # type: ignore
from ._models import Parameter # type: ignore
from ._models import ParameterAssignment # type: ignore
from ._models import ParameterDefinition # type: ignore
from ._models import PatchFlowRequest # type: ignore
from ._models import Pipeline # type: ignore
from ._models import PipelineDraft # type: ignore
from ._models import PipelineDraftStepDetails # type: ignore
from ._models import PipelineDraftSummary # type: ignore
from ._models import PipelineEndpoint # type: ignore
from ._models import PipelineEndpointSummary # type: ignore
from ._models import PipelineGraph # type: ignore
from ._models import PipelineInput # type: ignore
from ._models import PipelineJob # type: ignore
from ._models import PipelineJobRuntimeBasicSettings # type: ignore
from ._models import PipelineJobScheduleDto # type: ignore
from ._models import PipelineOutput # type: ignore
from ._models import PipelineRun # type: ignore
from ._models import PipelineRunGraphDetail # type: ignore
from ._models import PipelineRunGraphStatus # type: ignore
from ._models import PipelineRunProfile # type: ignore
from ._models import PipelineRunStatus # type: ignore
from ._models import PipelineRunStepDetails # type: ignore
from ._models import PipelineRunSummary # type: ignore
from ._models import PipelineStatus # type: ignore
from ._models import PipelineStepRun # type: ignore
from ._models import PipelineStepRunOutputs # type: ignore
from ._models import PipelineSubDraft # type: ignore
from ._models import PolicyValidationResponse # type: ignore
from ._models import PortInfo # type: ignore
from ._models import PortOutputInfo # type: ignore
from ._models import PriorityConfig # type: ignore
from ._models import PriorityConfiguration # type: ignore
from ._models import PromoteDataSetRequest # type: ignore
from ._models import ProviderEntity # type: ignore
from ._models import PublishedPipeline # type: ignore
from ._models import PublishedPipelineSummary # type: ignore
from ._models import PyTorchConfiguration # type: ignore
from ._models import PythonInterfaceMapping # type: ignore
from ._models import PythonPyPiOrRCranLibraryDto # type: ignore
from ._models import PythonSection # type: ignore
from ._models import QueueingInfo # type: ignore
from ._models import RCranPackage # type: ignore
from ._models import RGitHubPackage # type: ignore
from ._models import RSection # type: ignore
from ._models import RawComponentDto # type: ignore
from ._models import RayConfiguration # type: ignore
from ._models import RealTimeEndpoint # type: ignore
from ._models import RealTimeEndpointInfo # type: ignore
from ._models import RealTimeEndpointStatus # type: ignore
from ._models import RealTimeEndpointSummary # type: ignore
from ._models import RealTimeEndpointTestRequest # type: ignore
from ._models import Recurrence # type: ignore
from ._models import RecurrencePattern # type: ignore
from ._models import RecurrenceSchedule # type: ignore
from ._models import RegenerateServiceKeysRequest # type: ignore
from ._models import RegisterComponentMetaInfo # type: ignore
from ._models import RegisterComponentMetaInfoExtraHashes # type: ignore
from ._models import RegisterComponentMetaInfoIdentifierHashes # type: ignore
from ._models import RegisterRegistryComponentMetaInfo # type: ignore
from ._models import RegisterRegistryComponentMetaInfoExtraHashes # type: ignore
from ._models import RegisterRegistryComponentMetaInfoIdentifierHashes # type: ignore
from ._models import RegisteredDataSetReference # type: ignore
from ._models import RegistrationOptions # type: ignore
from ._models import RegistryBlobReferenceData # type: ignore
from ._models import RegistryIdentity # type: ignore
from ._models import Relationship # type: ignore
from ._models import RemoteDockerComputeInfo # type: ignore
from ._models import ResourceConfig # type: ignore
from ._models import ResourceConfiguration # type: ignore
from ._models import ResourcesSetting # type: ignore
from ._models import RetrieveToolFuncResultRequest # type: ignore
from ._models import RetryConfiguration # type: ignore
from ._models import RootError # type: ignore
from ._models import RunAnnotations # type: ignore
from ._models import RunConfiguration # type: ignore
from ._models import RunDatasetReference # type: ignore
from ._models import RunDefinition # type: ignore
from ._models import RunDetailsDto # type: ignore
from ._models import RunDetailsWarningDto # type: ignore
from ._models import RunDto # type: ignore
from ._models import RunIndexEntity # type: ignore
from ._models import RunIndexMetricSummary # type: ignore
from ._models import RunIndexMetricSummarySystemObject # type: ignore
from ._models import RunIndexResourceMetricSummary # type: ignore
from ._models import RunMetricDto # type: ignore
from ._models import RunMetricsTypesDto # type: ignore
from ._models import RunProperties # type: ignore
from ._models import RunSettingParameter # type: ignore
from ._models import RunSettingParameterAssignment # type: ignore
from ._models import RunSettingUIParameterHint # type: ignore
from ._models import RunStatusPeriod # type: ignore
from ._models import RunTypeV2 # type: ignore
from ._models import RunTypeV2Index # type: ignore
from ._models import RuntimeConfiguration # type: ignore
from ._models import SampleMeta # type: ignore
from ._models import SavePipelineDraftRequest # type: ignore
from ._models import SavedDataSetReference # type: ignore
from ._models import ScheduleBase # type: ignore
from ._models import SchemaContractsCreatedBy # type: ignore
from ._models import ScopeCloudConfiguration # type: ignore
from ._models import Seasonality # type: ignore
from ._models import SecretConfiguration # type: ignore
from ._models import SegmentedResult1 # type: ignore
from ._models import ServiceLogRequest # type: ignore
from ._models import SessionApplication # type: ignore
from ._models import SessionApplicationRunCommandResult # type: ignore
from ._models import SessionProperties # type: ignore
from ._models import SetupFlowSessionRequest # type: ignore
from ._models import SharingScope # type: ignore
from ._models import Snapshot # type: ignore
from ._models import SnapshotInfo # type: ignore
from ._models import SourceCodeDataReference # type: ignore
from ._models import SparkConfiguration # type: ignore
from ._models import SparkJarTaskDto # type: ignore
from ._models import SparkJob # type: ignore
from ._models import SparkJobEntry # type: ignore
from ._models import SparkMavenPackage # type: ignore
from ._models import SparkPythonTaskDto # type: ignore
from ._models import SparkResourceConfiguration # type: ignore
from ._models import SparkSection # type: ignore
from ._models import SparkSubmitTaskDto # type: ignore
from ._models import SqlDataPath # type: ignore
from ._models import StackEnsembleSettings # type: ignore
from ._models import StandbyPoolProperties # type: ignore
from ._models import StandbyPoolResourceStatus # type: ignore
from ._models import StartRunResult # type: ignore
from ._models import StepRunProfile # type: ignore
from ._models import StorageInfo # type: ignore
from ._models import StoredProcedureParameter # type: ignore
from ._models import Stream # type: ignore
from ._models import StructuredInterface # type: ignore
from ._models import StructuredInterfaceInput # type: ignore
from ._models import StructuredInterfaceOutput # type: ignore
from ._models import StructuredInterfaceParameter # type: ignore
from ._models import StudioMigrationInfo # type: ignore
from ._models import SubGraphConcatenateAssignment # type: ignore
from ._models import SubGraphConfiguration # type: ignore
from ._models import SubGraphConnectionInfo # type: ignore
from ._models import SubGraphDataPathParameterAssignment # type: ignore
from ._models import SubGraphInfo # type: ignore
from ._models import SubGraphParameterAssignment # type: ignore
from ._models import SubGraphPortInfo # type: ignore
from ._models import SubPipelineDefinition # type: ignore
from ._models import SubPipelineParameterAssignment # type: ignore
from ._models import SubPipelinesInfo # type: ignore
from ._models import SubStatusPeriod # type: ignore
from ._models import SubmitBulkRunRequest # type: ignore
from ._models import SubmitBulkRunResponse # type: ignore
from ._models import SubmitFlowRequest # type: ignore
from ._models import SubmitPipelineRunRequest # type: ignore
from ._models import SweepEarlyTerminationPolicy # type: ignore
from ._models import SweepSettings # type: ignore
from ._models import SweepSettingsLimits # type: ignore
from ._models import SystemData # type: ignore
from ._models import SystemMeta # type: ignore
from ._models import SystemMetaExtraHashes # type: ignore
from ._models import SystemMetaIdentifierHashes # type: ignore
from ._models import TargetLags # type: ignore
from ._models import TargetRollingWindowSize # type: ignore
from ._models import TargetSelectorConfiguration # type: ignore
from ._models import Task # type: ignore
from ._models import TaskControlFlowInfo # type: ignore
from ._models import TaskReuseInfo # type: ignore
from ._models import TensorflowConfiguration # type: ignore
from ._models import TestDataSettings # type: ignore
from ._models import Tool # type: ignore
from ._models import ToolFuncResponse # type: ignore
from ._models import ToolInputDynamicList # type: ignore
from ._models import ToolInputGeneratedBy # type: ignore
from ._models import ToolMetaDto # type: ignore
from ._models import ToolSetting # type: ignore
from ._models import ToolSourceMeta # type: ignore
from ._models import TorchDistributedConfiguration # type: ignore
from ._models import TrainingDiagnosticConfiguration # type: ignore
from ._models import TrainingOutput # type: ignore
from ._models import TrainingSettings # type: ignore
from ._models import TriggerAsyncOperationStatus # type: ignore
from ._models import TuningNodeSetting # type: ignore
from ._models import TypedAssetReference # type: ignore
from ._models import UIAzureOpenAIDeploymentNameSelector # type: ignore
from ._models import UIAzureOpenAIModelCapabilities # type: ignore
from ._models import UIColumnPicker # type: ignore
from ._models import UIComputeSelection # type: ignore
from ._models import UIHyperparameterConfiguration # type: ignore
from ._models import UIInputSetting # type: ignore
from ._models import UIJsonEditor # type: ignore
from ._models import UIParameterHint # type: ignore
from ._models import UIPromptFlowConnectionSelector # type: ignore
from ._models import UIWidgetMetaInfo # type: ignore
from ._models import UIYamlEditor # type: ignore
from ._models import UnversionedEntityRequestDto # type: ignore
from ._models import UnversionedEntityResponseDto # type: ignore
from ._models import UnversionedRebuildIndexDto # type: ignore
from ._models import UnversionedRebuildResponseDto # type: ignore
from ._models import UpdateComponentRequest # type: ignore
from ._models import UpdateFlowRequest # type: ignore
from ._models import UpdateFlowRuntimeRequest # type: ignore
from ._models import UpdateRegistryComponentRequest # type: ignore
from ._models import UploadOptions # type: ignore
from ._models import UriReference # type: ignore
from ._models import User # type: ignore
from ._models import UserAssignedIdentity # type: ignore
from ._models import ValidationDataSettings # type: ignore
from ._models import VariantNode # type: ignore
from ._models import WebServiceComputeMetaInfo # type: ignore
from ._models import WebServicePort # type: ignore
from ._models import Webhook # type: ignore
from ._models import WorkspaceConnectionSpec # type: ignore
from ._azure_machine_learning_designer_service_client_enums import (
AEVAAssetType,
AEVADataStoreMode,
AEVAIdentityType,
ActionType,
AetherArgumentValueType,
AetherAssetType,
AetherBuildSourceType,
AetherComputeType,
AetherControlFlowType,
AetherControlInputValue,
AetherDataCopyMode,
AetherDataLocationStorageType,
AetherDataReferenceType,
AetherDataStoreMode,
AetherDataTransferStorageType,
AetherDataTransferTaskType,
AetherDatasetType,
AetherEarlyTerminationPolicyType,
AetherEntityStatus,
AetherExecutionEnvironment,
AetherExecutionPhase,
AetherFeaturizationMode,
AetherFileBasedPathType,
AetherForecastHorizonMode,
AetherIdentityType,
AetherLogVerbosity,
AetherModuleDeploymentSource,
AetherModuleHashVersion,
AetherModuleType,
AetherNCrossValidationMode,
AetherParameterType,
AetherParameterValueType,
AetherPrimaryMetrics,
AetherRepositoryType,
AetherResourceOperator,
AetherResourceValueType,
AetherSamplingAlgorithmType,
AetherSeasonalityMode,
AetherShortSeriesHandlingConfiguration,
AetherStackMetaLearnerType,
AetherStoredProcedureParameterType,
AetherTabularTrainingMode,
AetherTargetAggregationFunction,
AetherTargetLagsMode,
AetherTargetRollingWindowSizeMode,
AetherTaskType,
AetherTrainingOutputType,
AetherUIScriptLanguageEnum,
AetherUIWidgetTypeEnum,
AetherUploadState,
AetherUseStl,
ApplicationEndpointType,
ArgumentValueType,
AssetScopeTypes,
AssetSourceType,
AssetType,
AutoDeleteCondition,
BuildContextLocationType,
Communicator,
ComponentRegistrationTypeEnum,
ComponentType,
ComputeEnvironmentType,
ComputeTargetType,
ComputeType,
ConfigValueType,
ConnectionCategory,
ConnectionScope,
ConnectionSourceType,
ConnectionType,
ConsumeMode,
ControlFlowType,
ControlInputValue,
DataBindingMode,
DataCategory,
DataCopyMode,
DataLocationStorageType,
DataPortType,
DataReferenceType,
DataSourceType,
DataStoreMode,
DataTransferStorageType,
DataTransferTaskType,
DataTypeMechanism,
DatasetAccessModes,
DatasetConsumptionType,
DatasetDeliveryMechanism,
DatasetOutputType,
DatasetType,
DeliveryMechanism,
DistributionParameterEnum,
DistributionType,
EarlyTerminationPolicyType,
EmailNotificationEnableType,
EndpointAuthMode,
EntityKind,
EntityStatus,
ErrorHandlingMode,
ExecutionPhase,
FeaturizationMode,
FlowFeatureStateEnum,
FlowLanguage,
FlowPatchOperationType,
FlowRunMode,
FlowRunTypeEnum,
FlowRuntimeSubmissionApiVersion,
FlowTestMode,
FlowType,
ForecastHorizonMode,
Framework,
Frequency,
GlobalJobDispatcherSupportedComputeType,
GraphComponentsMode,
GraphDatasetsLoadModes,
GraphSdkCodeType,
HttpStatusCode,
IdentityType,
InputType,
IntellectualPropertyAccessMode,
JobInputType,
JobLimitsType,
JobOutputType,
JobProvisioningState,
JobStatus,
JobType,
KeyType,
ListViewType,
LogLevel,
LogVerbosity,
LongRunningUpdateType,
MLFlowAutologgerState,
ManagedServiceIdentityType,
MetricValueType,
MfeInternalIdentityType,
MfeInternalMLFlowAutologgerState,
MfeInternalScheduleStatus,
ModuleDtoFields,
ModuleInfoFromYamlStatusEnum,
ModuleRunSettingTypes,
ModuleScope,
ModuleSourceType,
ModuleType,
ModuleUpdateOperationType,
ModuleWorkingMechanism,
NCrossValidationMode,
NodeCompositionMode,
NodesValueType,
Orientation,
OutputMechanism,
ParameterType,
ParameterValueType,
PipelineDraftMode,
PipelineRunStatusCode,
PipelineStatusCode,
PipelineType,
PortAction,
PrimaryMetrics,
ProvisioningState,
RealTimeEndpointInternalStepCode,
RealTimeEndpointOpCode,
RealTimeEndpointOpStatusCode,
RecurrenceFrequency,
RunDisplayNameGenerationType,
RunSettingParameterType,
RunSettingUIWidgetTypeEnum,
RunStatus,
RunType,
RuntimeStatusEnum,
RuntimeType,
SamplingAlgorithmType,
ScheduleProvisioningStatus,
ScheduleStatus,
ScheduleType,
ScopeType,
ScriptType,
SeasonalityMode,
Section,
SessionSetupModeEnum,
SetupFlowSessionAction,
SeverityLevel,
ShortSeriesHandlingConfiguration,
StackMetaLearnerType,
StorageAuthType,
StoredProcedureParameterType,
SuccessfulCommandReturnCode,
TabularTrainingMode,
TargetAggregationFunction,
TargetLagsMode,
TargetRollingWindowSizeMode,
TaskCreationOptions,
TaskStatus,
TaskStatusCode,
TaskType,
ToolFuncCallScenario,
ToolState,
ToolType,
TrainingOutputType,
TriggerOperationType,
TriggerType,
UIInputDataDeliveryMode,
UIScriptLanguageEnum,
UIWidgetTypeEnum,
UploadState,
UseStl,
UserType,
ValidationStatus,
ValueType,
VmPriority,
WebServiceState,
WeekDays,
Weekday,
YarnDeployMode,
)
__all__ = [
'ACIAdvanceSettings',
'AEVAComputeConfiguration',
'AEVAResourceConfiguration',
'AISuperComputerConfiguration',
'AISuperComputerScalePolicy',
'AISuperComputerStorageReferenceConfiguration',
'AKSAdvanceSettings',
'AKSReplicaStatus',
'AMLComputeConfiguration',
'APCloudConfiguration',
'Activate',
'AdditionalErrorInfo',
'AdhocTriggerScheduledCommandJobRequest',
'AdhocTriggerScheduledSparkJobRequest',
'AetherAPCloudConfiguration',
'AetherAmlDataset',
'AetherAmlSparkCloudSetting',
'AetherArgumentAssignment',
'AetherAssetDefinition',
'AetherAssetOutputSettings',
'AetherAutoFeaturizeConfiguration',
'AetherAutoMLComponentConfiguration',
'AetherAutoTrainConfiguration',
'AetherAzureBlobReference',
'AetherAzureDataLakeGen2Reference',
'AetherAzureDataLakeReference',
'AetherAzureDatabaseReference',
'AetherAzureFilesReference',
'AetherBatchAiComputeInfo',
'AetherBuildArtifactInfo',
'AetherCloudBuildDropPathInfo',
'AetherCloudBuildInfo',
'AetherCloudBuildQueueInfo',
'AetherCloudPrioritySetting',
'AetherCloudSettings',
'AetherColumnTransformer',
'AetherComputeConfiguration',
'AetherComputeSetting',
'AetherControlInput',
'AetherControlOutput',
'AetherCopyDataTask',
'AetherCosmosReference',
'AetherCreatedBy',
'AetherCustomReference',
'AetherDBFSReference',
'AetherDataLocation',
'AetherDataLocationReuseCalculationFields',
'AetherDataPath',
'AetherDataReference',
'AetherDataSetDefinition',
'AetherDataSetDefinitionValue',
'AetherDataSettings',
'AetherDataTransferCloudConfiguration',
'AetherDataTransferSink',
'AetherDataTransferSource',
'AetherDataTransferV2CloudSetting',
'AetherDatabaseSink',
'AetherDatabaseSource',
'AetherDatabricksComputeInfo',
'AetherDatasetOutput',
'AetherDatasetOutputOptions',
'AetherDatasetRegistration',
'AetherDatastoreSetting',
'AetherDoWhileControlFlowInfo',
'AetherDoWhileControlFlowRunSettings',
'AetherDockerSettingConfiguration',
'AetherEntityInterfaceDocumentation',
'AetherEntrySetting',
'AetherEnvironmentConfiguration',
'AetherEsCloudConfiguration',
'AetherExportDataTask',
'AetherFeaturizationSettings',
'AetherFileSystem',
'AetherForecastHorizon',
'AetherForecastingSettings',
'AetherGeneralSettings',
'AetherGlobsOptions',
'AetherGraphControlNode',
'AetherGraphControlReferenceNode',
'AetherGraphDatasetNode',
'AetherGraphEdge',
'AetherGraphEntity',
'AetherGraphModuleNode',
'AetherGraphReferenceNode',
'AetherHdfsReference',
'AetherHdiClusterComputeInfo',
'AetherHdiRunConfiguration',
'AetherHyperDriveConfiguration',
'AetherIdentitySetting',
'AetherImportDataTask',
'AetherInputSetting',
'AetherInteractiveConfig',
'AetherK8SConfiguration',
'AetherLegacyDataPath',
'AetherLimitSettings',
'AetherMlcComputeInfo',
'AetherModuleEntity',
'AetherModuleExtendedProperties',
'AetherNCrossValidations',
'AetherOutputSetting',
'AetherParallelForControlFlowInfo',
'AetherParameterAssignment',
'AetherPhillyHdfsReference',
'AetherPortInfo',
'AetherPriorityConfig',
'AetherPriorityConfiguration',
'AetherRegisteredDataSetReference',
'AetherRemoteDockerComputeInfo',
'AetherResourceAssignment',
'AetherResourceAttributeAssignment',
'AetherResourceAttributeDefinition',
'AetherResourceConfig',
'AetherResourceConfiguration',
'AetherResourceModel',
'AetherResourcesSetting',
'AetherSavedDataSetReference',
'AetherScopeCloudConfiguration',
'AetherSeasonality',
'AetherSqlDataPath',
'AetherStackEnsembleSettings',
'AetherStoredProcedureParameter',
'AetherStructuredInterface',
'AetherStructuredInterfaceInput',
'AetherStructuredInterfaceOutput',
'AetherStructuredInterfaceParameter',
'AetherSubGraphConfiguration',
'AetherSweepEarlyTerminationPolicy',
'AetherSweepSettings',
'AetherSweepSettingsLimits',
'AetherTargetLags',
'AetherTargetRollingWindowSize',
'AetherTargetSelectorConfiguration',
'AetherTestDataSettings',
'AetherTorchDistributedConfiguration',
'AetherTrainingOutput',
'AetherTrainingSettings',
'AetherUIAzureOpenAIDeploymentNameSelector',
'AetherUIAzureOpenAIModelCapabilities',
'AetherUIColumnPicker',
'AetherUIJsonEditor',
'AetherUIParameterHint',
'AetherUIPromptFlowConnectionSelector',
'AetherValidationDataSettings',
'AetherVsoBuildArtifactInfo',
'AetherVsoBuildDefinitionInfo',
'AetherVsoBuildInfo',
'AmlDataset',
'AmlK8SConfiguration',
'AmlK8SPriorityConfiguration',
'AmlSparkCloudSetting',
'ApiAndParameters',
'ApplicationEndpointConfiguration',
'ArgumentAssignment',
'Asset',
'AssetDefinition',
'AssetNameAndVersionIdentifier',
'AssetOutputSettings',
'AssetOutputSettingsParameter',
'AssetPublishResult',
'AssetPublishSingleRegionResult',
'AssetTypeMetaInfo',
'AssetVersionPublishRequest',
'AssignedUser',
'AuthKeys',
'AutoClusterComputeSpecification',
'AutoDeleteSetting',
'AutoFeaturizeConfiguration',
'AutoMLComponentConfiguration',
'AutoScaler',
'AutoTrainConfiguration',
'AutologgerSettings',
'AvailabilityResponse',
'AzureBlobReference',
'AzureDataLakeGen2Reference',
'AzureDataLakeReference',
'AzureDatabaseReference',
'AzureFilesReference',
'AzureMLModuleVersionDescriptor',
'AzureOpenAIDeploymentDto',
'AzureOpenAIModelCapabilities',
'BatchAiComputeInfo',
'BatchDataInput',
'BatchExportComponentSpecResponse',
'BatchExportRawComponentResponse',
'BatchGetComponentHashesRequest',
'BatchGetComponentRequest',
'Binding',
'BulkTestDto',
'CloudError',
'CloudPrioritySetting',
'CloudSettings',
'ColumnTransformer',
'CommandJob',
'CommandJobLimits',
'CommandReturnCodeConfig',
'ComponentConfiguration',
'ComponentInput',
'ComponentJob',
'ComponentJobInput',
'ComponentJobOutput',
'ComponentNameAndDefaultVersion',
'ComponentNameMetaInfo',
'ComponentOutput',
'ComponentPreflightResult',
'ComponentSpecMetaInfo',
'ComponentUpdateRequest',
'ComponentValidationRequest',
'ComponentValidationResponse',
'Compute',
'ComputeConfiguration',
'ComputeContract',
'ComputeIdentityContract',
'ComputeIdentityDto',
'ComputeInfo',
'ComputeProperties',
'ComputeRPUserAssignedIdentity',
'ComputeRequest',
'ComputeSetting',
'ComputeStatus',
'ComputeStatusDetail',
'ComputeWarning',
'ConnectionConfigSpec',
'ConnectionDto',
'ConnectionEntity',
'ConnectionOverrideSetting',
'ConnectionSpec',
'ContainerInstanceConfiguration',
'ContainerRegistry',
'ContainerResourceRequirements',
'ControlInput',
'ControlOutput',
'CopyDataTask',
'CreateFlowFromSampleRequest',
'CreateFlowRequest',
'CreateFlowRuntimeRequest',
'CreateFlowSessionRequest',
'CreateInferencePipelineRequest',
'CreateOrUpdateConnectionRequest',
'CreateOrUpdateConnectionRequestDto',
'CreatePipelineDraftRequest',
'CreatePipelineJobScheduleDto',
'CreatePublishedPipelineRequest',
'CreateRealTimeEndpointRequest',
'CreatedBy',
'CreatedFromDto',
'CreationContext',
'Cron',
'CustomConnectionConfig',
'CustomReference',
'DBFSReference',
'Data',
'DataInfo',
'DataLocation',
'DataPath',
'DataPathParameter',
'DataPortDto',
'DataReference',
'DataReferenceConfiguration',
'DataSetDefinition',
'DataSetDefinitionValue',
'DataSetPathParameter',
'DataSettings',
'DataTransferCloudConfiguration',
'DataTransferSink',
'DataTransferSource',
'DataTransferV2CloudSetting',
'DataTypeCreationInfo',
'DatabaseSink',
'DatabaseSource',
'DatabricksComputeInfo',
'DatabricksConfiguration',
'DatacacheConfiguration',
'DatasetIdentifier',
'DatasetInputDetails',
'DatasetLineage',
'DatasetOutput',
'DatasetOutputDetails',
'DatasetOutputOptions',
'DatasetRegistration',
'DatasetRegistrationOptions',
'DatastoreSetting',
'DbfsStorageInfoDto',
'DebugInfoResponse',
'DeployFlowRequest',
'DeploymentInfo',
'DistributionConfiguration',
'DistributionParameter',
'DoWhileControlFlowInfo',
'DoWhileControlFlowRunSettings',
'DockerBuildContext',
'DockerConfiguration',
'DockerImagePlatform',
'DockerSection',
'DockerSettingConfiguration',
'DownloadResourceInfo',
'EPRPipelineRunErrorClassificationRequest',
'EndpointSetting',
'EntityInterface',
'EntrySetting',
'EnumParameterRule',
'EnvironmentConfiguration',
'EnvironmentDefinition',
'EnvironmentDefinitionDto',
'ErrorAdditionalInfo',
'ErrorResponse',
'EsCloudConfiguration',
'EvaluationFlowRunSettings',
'ExampleRequest',
'ExecutionContextDto',
'ExecutionDataLocation',
'ExecutionDataPath',
'ExecutionGlobsOptions',
'ExperimentComputeMetaInfo',
'ExperimentInfo',
'ExportComponentMetaInfo',
'ExportDataTask',
'FeaturizationSettings',
'FeedDto',
'FeedDtoSupportedAssetTypes',
'FileSystem',
'Flow',
'FlowAnnotations',
'FlowBaseDto',
'FlowDto',
'FlowEnvironment',
'FlowFeature',
'FlowFeatureState',
'FlowGraph',
'FlowGraphAnnotationNode',
'FlowGraphLayout',
'FlowGraphReference',
'FlowIndexEntity',
'FlowInputDefinition',
'FlowNode',
'FlowNodeLayout',
'FlowNodeVariant',
'FlowOutputDefinition',
'FlowProperties',
'FlowRunBasePath',
'FlowRunInfo',
'FlowRunResult',
'FlowRunSettings',
'FlowRuntimeCapability',
'FlowRuntimeDto',
'FlowSampleDto',
'FlowSessionDto',
'FlowSnapshot',
'FlowSubmitRunSettings',
'FlowTestInfo',
'FlowTestStorageSetting',
'FlowToolSettingParameter',
'FlowToolsDto',
'FlowVariantNode',
'ForecastHorizon',
'ForecastingSettings',
'GeneralSettings',
'GeneratePipelineComponentRequest',
'GenerateToolMetaRequest',
'GetDynamicListRequest',
'GetRunDataResultDto',
'GetTrainingSessionDto',
'GlobalJobDispatcherConfiguration',
'GlobsOptions',
'GraphAnnotationNode',
'GraphControlNode',
'GraphControlReferenceNode',
'GraphDatasetNode',
'GraphDraftEntity',
'GraphEdge',
'GraphLayout',
'GraphLayoutCreationInfo',
'GraphModuleNode',
'GraphModuleNodeRunSetting',
'GraphModuleNodeUIInputSetting',
'GraphNodeStatusInfo',
'GraphReferenceNode',
'HdfsReference',
'HdiClusterComputeInfo',
'HdiConfiguration',
'HdiRunConfiguration',
'HistoryConfiguration',
'HyperDriveConfiguration',
'ICheckableLongRunningOperationResponse',
'IdentityConfiguration',
'IdentitySetting',
'ImportDataTask',
'IndexedErrorResponse',
'InitScriptInfoDto',
'InnerErrorDetails',
'InnerErrorResponse',
'InputAsset',
'InputData',
'InputDataBinding',
'InputDefinition',
'InputOutputPortMetadata',
'InputSetting',
'IntellectualPropertyPublisherInformation',
'InteractiveConfig',
'InteractiveConfiguration',
'JobCost',
'JobEndpoint',
'JobInput',
'JobOutput',
'JobOutputArtifacts',
'JobScheduleDto',
'K8SConfiguration',
'KeyValuePairComponentNameMetaInfoErrorResponse',
'KeyValuePairComponentNameMetaInfoModuleDto',
'KeyValuePairStringObject',
'KubernetesConfiguration',
'Kwarg',
'LegacyDataPath',
'LimitSettings',
'LinkedADBWorkspaceMetadata',
'LinkedPipelineInfo',
'LoadFlowAsComponentRequest',
'LogRunTerminatedEventDto',
'LongRunningOperationUriResponse',
'LongRunningUpdateRegistryComponentRequest',
'ManagedServiceIdentity',
'MavenLibraryDto',
'MetricProperties',
'MetricSchemaDto',
'MetricSchemaPropertyDto',
'MetricV2Dto',
'MetricV2Value',
'MfeInternalAutologgerSettings',
'MfeInternalIdentityConfiguration',
'MfeInternalNodes',
'MfeInternalOutputData',
'MfeInternalSecretConfiguration',
'MfeInternalUriReference',
'MfeInternalV20211001ComponentJob',
'MinMaxParameterRule',
'MlcComputeInfo',
'ModelDto',
'ModelManagementErrorResponse',
'ModifyPipelineJobScheduleDto',
'ModuleDto',
'ModuleDtoWithErrors',
'ModuleDtoWithValidateStatus',
'ModuleEntity',
'ModulePythonInterface',
'MpiConfiguration',
'NCrossValidations',
'Node',
'NodeInputPort',
'NodeLayout',
'NodeOutputPort',
'NodePortInterface',
'NodeSource',
'NodeTelemetryMetaInfo',
'NodeVariant',
'Nodes',
'NoteBookTaskDto',
'NotificationSetting',
'ODataError',
'ODataErrorDetail',
'ODataErrorResponse',
'ODataInnerError',
'OutputData',
'OutputDataBinding',
'OutputDatasetLineage',
'OutputDefinition',
'OutputOptions',
'OutputSetting',
'OutputSettingSpec',
'PaginatedDataInfoList',
'PaginatedModelDtoList',
'PaginatedModuleDtoList',
'PaginatedPipelineDraftSummaryList',
'PaginatedPipelineEndpointSummaryList',
'PaginatedPipelineRunSummaryList',
'PaginatedPublishedPipelineSummaryList',
'ParallelForControlFlowInfo',
'ParallelTaskConfiguration',
'Parameter',
'ParameterAssignment',
'ParameterDefinition',
'PatchFlowRequest',
'Pipeline',
'PipelineDraft',
'PipelineDraftStepDetails',
'PipelineDraftSummary',
'PipelineEndpoint',
'PipelineEndpointSummary',
'PipelineGraph',
'PipelineInput',
'PipelineJob',
'PipelineJobRuntimeBasicSettings',
'PipelineJobScheduleDto',
'PipelineOutput',
'PipelineRun',
'PipelineRunGraphDetail',
'PipelineRunGraphStatus',
'PipelineRunProfile',
'PipelineRunStatus',
'PipelineRunStepDetails',
'PipelineRunSummary',
'PipelineStatus',
'PipelineStepRun',
'PipelineStepRunOutputs',
'PipelineSubDraft',
'PolicyValidationResponse',
'PortInfo',
'PortOutputInfo',
'PriorityConfig',
'PriorityConfiguration',
'PromoteDataSetRequest',
'ProviderEntity',
'PublishedPipeline',
'PublishedPipelineSummary',
'PyTorchConfiguration',
'PythonInterfaceMapping',
'PythonPyPiOrRCranLibraryDto',
'PythonSection',
'QueueingInfo',
'RCranPackage',
'RGitHubPackage',
'RSection',
'RawComponentDto',
'RayConfiguration',
'RealTimeEndpoint',
'RealTimeEndpointInfo',
'RealTimeEndpointStatus',
'RealTimeEndpointSummary',
'RealTimeEndpointTestRequest',
'Recurrence',
'RecurrencePattern',
'RecurrenceSchedule',
'RegenerateServiceKeysRequest',
'RegisterComponentMetaInfo',
'RegisterComponentMetaInfoExtraHashes',
'RegisterComponentMetaInfoIdentifierHashes',
'RegisterRegistryComponentMetaInfo',
'RegisterRegistryComponentMetaInfoExtraHashes',
'RegisterRegistryComponentMetaInfoIdentifierHashes',
'RegisteredDataSetReference',
'RegistrationOptions',
'RegistryBlobReferenceData',
'RegistryIdentity',
'Relationship',
'RemoteDockerComputeInfo',
'ResourceConfig',
'ResourceConfiguration',
'ResourcesSetting',
'RetrieveToolFuncResultRequest',
'RetryConfiguration',
'RootError',
'RunAnnotations',
'RunConfiguration',
'RunDatasetReference',
'RunDefinition',
'RunDetailsDto',
'RunDetailsWarningDto',
'RunDto',
'RunIndexEntity',
'RunIndexMetricSummary',
'RunIndexMetricSummarySystemObject',
'RunIndexResourceMetricSummary',
'RunMetricDto',
'RunMetricsTypesDto',
'RunProperties',
'RunSettingParameter',
'RunSettingParameterAssignment',
'RunSettingUIParameterHint',
'RunStatusPeriod',
'RunTypeV2',
'RunTypeV2Index',
'RuntimeConfiguration',
'SampleMeta',
'SavePipelineDraftRequest',
'SavedDataSetReference',
'ScheduleBase',
'SchemaContractsCreatedBy',
'ScopeCloudConfiguration',
'Seasonality',
'SecretConfiguration',
'SegmentedResult1',
'ServiceLogRequest',
'SessionApplication',
'SessionApplicationRunCommandResult',
'SessionProperties',
'SetupFlowSessionRequest',
'SharingScope',
'Snapshot',
'SnapshotInfo',
'SourceCodeDataReference',
'SparkConfiguration',
'SparkJarTaskDto',
'SparkJob',
'SparkJobEntry',
'SparkMavenPackage',
'SparkPythonTaskDto',
'SparkResourceConfiguration',
'SparkSection',
'SparkSubmitTaskDto',
'SqlDataPath',
'StackEnsembleSettings',
'StandbyPoolProperties',
'StandbyPoolResourceStatus',
'StartRunResult',
'StepRunProfile',
'StorageInfo',
'StoredProcedureParameter',
'Stream',
'StructuredInterface',
'StructuredInterfaceInput',
'StructuredInterfaceOutput',
'StructuredInterfaceParameter',
'StudioMigrationInfo',
'SubGraphConcatenateAssignment',
'SubGraphConfiguration',
'SubGraphConnectionInfo',
'SubGraphDataPathParameterAssignment',
'SubGraphInfo',
'SubGraphParameterAssignment',
'SubGraphPortInfo',
'SubPipelineDefinition',
'SubPipelineParameterAssignment',
'SubPipelinesInfo',
'SubStatusPeriod',
'SubmitBulkRunRequest',
'SubmitBulkRunResponse',
'SubmitFlowRequest',
'SubmitPipelineRunRequest',
'SweepEarlyTerminationPolicy',
'SweepSettings',
'SweepSettingsLimits',
'SystemData',
'SystemMeta',
'SystemMetaExtraHashes',
'SystemMetaIdentifierHashes',
'TargetLags',
'TargetRollingWindowSize',
'TargetSelectorConfiguration',
'Task',
'TaskControlFlowInfo',
'TaskReuseInfo',
'TensorflowConfiguration',
'TestDataSettings',
'Tool',
'ToolFuncResponse',
'ToolInputDynamicList',
'ToolInputGeneratedBy',
'ToolMetaDto',
'ToolSetting',
'ToolSourceMeta',
'TorchDistributedConfiguration',
'TrainingDiagnosticConfiguration',
'TrainingOutput',
'TrainingSettings',
'TriggerAsyncOperationStatus',
'TuningNodeSetting',
'TypedAssetReference',
'UIAzureOpenAIDeploymentNameSelector',
'UIAzureOpenAIModelCapabilities',
'UIColumnPicker',
'UIComputeSelection',
'UIHyperparameterConfiguration',
'UIInputSetting',
'UIJsonEditor',
'UIParameterHint',
'UIPromptFlowConnectionSelector',
'UIWidgetMetaInfo',
'UIYamlEditor',
'UnversionedEntityRequestDto',
'UnversionedEntityResponseDto',
'UnversionedRebuildIndexDto',
'UnversionedRebuildResponseDto',
'UpdateComponentRequest',
'UpdateFlowRequest',
'UpdateFlowRuntimeRequest',
'UpdateRegistryComponentRequest',
'UploadOptions',
'UriReference',
'User',
'UserAssignedIdentity',
'ValidationDataSettings',
'VariantNode',
'WebServiceComputeMetaInfo',
'WebServicePort',
'Webhook',
'WorkspaceConnectionSpec',
'AEVAAssetType',
'AEVADataStoreMode',
'AEVAIdentityType',
'ActionType',
'AetherArgumentValueType',
'AetherAssetType',
'AetherBuildSourceType',
'AetherComputeType',
'AetherControlFlowType',
'AetherControlInputValue',
'AetherDataCopyMode',
'AetherDataLocationStorageType',
'AetherDataReferenceType',
'AetherDataStoreMode',
'AetherDataTransferStorageType',
'AetherDataTransferTaskType',
'AetherDatasetType',
'AetherEarlyTerminationPolicyType',
'AetherEntityStatus',
'AetherExecutionEnvironment',
'AetherExecutionPhase',
'AetherFeaturizationMode',
'AetherFileBasedPathType',
'AetherForecastHorizonMode',
'AetherIdentityType',
'AetherLogVerbosity',
'AetherModuleDeploymentSource',
'AetherModuleHashVersion',
'AetherModuleType',
'AetherNCrossValidationMode',
'AetherParameterType',
'AetherParameterValueType',
'AetherPrimaryMetrics',
'AetherRepositoryType',
'AetherResourceOperator',
'AetherResourceValueType',
'AetherSamplingAlgorithmType',
'AetherSeasonalityMode',
'AetherShortSeriesHandlingConfiguration',
'AetherStackMetaLearnerType',
'AetherStoredProcedureParameterType',
'AetherTabularTrainingMode',
'AetherTargetAggregationFunction',
'AetherTargetLagsMode',
'AetherTargetRollingWindowSizeMode',
'AetherTaskType',
'AetherTrainingOutputType',
'AetherUIScriptLanguageEnum',
'AetherUIWidgetTypeEnum',
'AetherUploadState',
'AetherUseStl',
'ApplicationEndpointType',
'ArgumentValueType',
'AssetScopeTypes',
'AssetSourceType',
'AssetType',
'AutoDeleteCondition',
'BuildContextLocationType',
'Communicator',
'ComponentRegistrationTypeEnum',
'ComponentType',
'ComputeEnvironmentType',
'ComputeTargetType',
'ComputeType',
'ConfigValueType',
'ConnectionCategory',
'ConnectionScope',
'ConnectionSourceType',
'ConnectionType',
'ConsumeMode',
'ControlFlowType',
'ControlInputValue',
'DataBindingMode',
'DataCategory',
'DataCopyMode',
'DataLocationStorageType',
'DataPortType',
'DataReferenceType',
'DataSourceType',
'DataStoreMode',
'DataTransferStorageType',
'DataTransferTaskType',
'DataTypeMechanism',
'DatasetAccessModes',
'DatasetConsumptionType',
'DatasetDeliveryMechanism',
'DatasetOutputType',
'DatasetType',
'DeliveryMechanism',
'DistributionParameterEnum',
'DistributionType',
'EarlyTerminationPolicyType',
'EmailNotificationEnableType',
'EndpointAuthMode',
'EntityKind',
'EntityStatus',
'ErrorHandlingMode',
'ExecutionPhase',
'FeaturizationMode',
'FlowFeatureStateEnum',
'FlowLanguage',
'FlowPatchOperationType',
'FlowRunMode',
'FlowRunTypeEnum',
'FlowRuntimeSubmissionApiVersion',
'FlowTestMode',
'FlowType',
'ForecastHorizonMode',
'Framework',
'Frequency',
'GlobalJobDispatcherSupportedComputeType',
'GraphComponentsMode',
'GraphDatasetsLoadModes',
'GraphSdkCodeType',
'HttpStatusCode',
'IdentityType',
'InputType',
'IntellectualPropertyAccessMode',
'JobInputType',
'JobLimitsType',
'JobOutputType',
'JobProvisioningState',
'JobStatus',
'JobType',
'KeyType',
'ListViewType',
'LogLevel',
'LogVerbosity',
'LongRunningUpdateType',
'MLFlowAutologgerState',
'ManagedServiceIdentityType',
'MetricValueType',
'MfeInternalIdentityType',
'MfeInternalMLFlowAutologgerState',
'MfeInternalScheduleStatus',
'ModuleDtoFields',
'ModuleInfoFromYamlStatusEnum',
'ModuleRunSettingTypes',
'ModuleScope',
'ModuleSourceType',
'ModuleType',
'ModuleUpdateOperationType',
'ModuleWorkingMechanism',
'NCrossValidationMode',
'NodeCompositionMode',
'NodesValueType',
'Orientation',
'OutputMechanism',
'ParameterType',
'ParameterValueType',
'PipelineDraftMode',
'PipelineRunStatusCode',
'PipelineStatusCode',
'PipelineType',
'PortAction',
'PrimaryMetrics',
'ProvisioningState',
'RealTimeEndpointInternalStepCode',
'RealTimeEndpointOpCode',
'RealTimeEndpointOpStatusCode',
'RecurrenceFrequency',
'RunDisplayNameGenerationType',
'RunSettingParameterType',
'RunSettingUIWidgetTypeEnum',
'RunStatus',
'RunType',
'RuntimeStatusEnum',
'RuntimeType',
'SamplingAlgorithmType',
'ScheduleProvisioningStatus',
'ScheduleStatus',
'ScheduleType',
'ScopeType',
'ScriptType',
'SeasonalityMode',
'Section',
'SessionSetupModeEnum',
'SetupFlowSessionAction',
'SeverityLevel',
'ShortSeriesHandlingConfiguration',
'StackMetaLearnerType',
'StorageAuthType',
'StoredProcedureParameterType',
'SuccessfulCommandReturnCode',
'TabularTrainingMode',
'TargetAggregationFunction',
'TargetLagsMode',
'TargetRollingWindowSizeMode',
'TaskCreationOptions',
'TaskStatus',
'TaskStatusCode',
'TaskType',
'ToolFuncCallScenario',
'ToolState',
'ToolType',
'TrainingOutputType',
'TriggerOperationType',
'TriggerType',
'UIInputDataDeliveryMode',
'UIScriptLanguageEnum',
'UIWidgetTypeEnum',
'UploadState',
'UseStl',
'UserType',
'ValidationStatus',
'ValueType',
'VmPriority',
'WebServiceState',
'WeekDays',
'Weekday',
'YarnDeployMode',
]
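# Usage sketch (illustrative, not part of the generated file): the import
# surface above means callers can pull models and enums straight from this
# package. The dotted package path is inferred from the repository layout;
# adjust it if the client is vendored elsewhere.
#
#     from promptflow.azure._restclient.flow.models import (
#         FlowRunSettings,
#         RuntimeConfiguration,
#         FlowRunMode,
#     )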
promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_models.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class ACIAdvanceSettings(msrest.serialization.Model):
"""ACIAdvanceSettings.
:ivar container_resource_requirements:
:vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar ssl_enabled:
:vartype ssl_enabled: bool
:ivar ssl_certificate:
:vartype ssl_certificate: str
:ivar ssl_key:
:vartype ssl_key: str
:ivar c_name:
:vartype c_name: str
:ivar dns_name_label:
:vartype dns_name_label: str
"""
_attribute_map = {
'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
'ssl_key': {'key': 'sslKey', 'type': 'str'},
'c_name': {'key': 'cName', 'type': 'str'},
'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container_resource_requirements:
:paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword ssl_enabled:
:paramtype ssl_enabled: bool
:keyword ssl_certificate:
:paramtype ssl_certificate: str
:keyword ssl_key:
:paramtype ssl_key: str
:keyword c_name:
:paramtype c_name: str
:keyword dns_name_label:
:paramtype dns_name_label: str
"""
super(ACIAdvanceSettings, self).__init__(**kwargs)
self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.ssl_enabled = kwargs.get('ssl_enabled', None)
self.ssl_certificate = kwargs.get('ssl_certificate', None)
self.ssl_key = kwargs.get('ssl_key', None)
self.c_name = kwargs.get('c_name', None)
self.dns_name_label = kwargs.get('dns_name_label', None)
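# Usage sketch (illustrative, not generated code): constructing an
# ACIAdvanceSettings payload. Keyword names follow the docstring above; the
# values are made-up assumptions, not defaults.
#
#     aci_settings = ACIAdvanceSettings(
#         app_insights_enabled=True,
#         ssl_enabled=True,
#         ssl_certificate="<pem-certificate>",
#         ssl_key="<pem-key>",
#         c_name="scoring.contoso.com",
#         dns_name_label="contoso-scoring",
#     )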
class Activate(msrest.serialization.Model):
"""Activate.
:ivar when:
:vartype when: str
:ivar is_property: Anything.
:vartype is_property: any
"""
_attribute_map = {
'when': {'key': 'when', 'type': 'str'},
'is_property': {'key': 'is', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword when:
:paramtype when: str
:keyword is_property: Anything.
:paramtype is_property: any
"""
super(Activate, self).__init__(**kwargs)
self.when = kwargs.get('when', None)
self.is_property = kwargs.get('is_property', None)
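# Usage sketch (illustrative): an activation condition. Note that the Python
# attribute ``is_property`` serializes to the JSON key ``is`` (see the
# _attribute_map above). The expression/value semantics shown here are an
# assumption for demonstration only.
#
#     activate = Activate(when="${inputs.mode}", is_property="debug")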
class AdditionalErrorInfo(msrest.serialization.Model):
"""AdditionalErrorInfo.
:ivar type:
:vartype type: str
:ivar info: Anything.
:vartype info: any
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword info: Anything.
:paramtype info: any
"""
super(AdditionalErrorInfo, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.info = kwargs.get('info', None)
class AdhocTriggerScheduledCommandJobRequest(msrest.serialization.Model):
"""AdhocTriggerScheduledCommandJobRequest.
:ivar job_name:
:vartype job_name: str
:ivar job_display_name:
:vartype job_display_name: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
"""
_attribute_map = {
'job_name': {'key': 'jobName', 'type': 'str'},
'job_display_name': {'key': 'jobDisplayName', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_name:
:paramtype job_name: str
:keyword job_display_name:
:paramtype job_display_name: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
"""
super(AdhocTriggerScheduledCommandJobRequest, self).__init__(**kwargs)
self.job_name = kwargs.get('job_name', None)
self.job_display_name = kwargs.get('job_display_name', None)
self.trigger_time_string = kwargs.get('trigger_time_string', None)
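# Usage sketch (illustrative values): triggering a scheduled command job ad
# hoc. ``trigger_time_string`` is a plain string per the docstring; an
# ISO-8601 timestamp is assumed here, not mandated by the model.
#
#     request = AdhocTriggerScheduledCommandJobRequest(
#         job_name="nightly-eval",
#         job_display_name="Nightly evaluation (manual trigger)",
#         trigger_time_string="2024-01-01T00:00:00Z",
#     )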
class AdhocTriggerScheduledSparkJobRequest(msrest.serialization.Model):
"""AdhocTriggerScheduledSparkJobRequest.
:ivar job_name:
:vartype job_name: str
:ivar job_display_name:
:vartype job_display_name: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
"""
_attribute_map = {
'job_name': {'key': 'jobName', 'type': 'str'},
'job_display_name': {'key': 'jobDisplayName', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_name:
:paramtype job_name: str
:keyword job_display_name:
:paramtype job_display_name: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
"""
super(AdhocTriggerScheduledSparkJobRequest, self).__init__(**kwargs)
self.job_name = kwargs.get('job_name', None)
self.job_display_name = kwargs.get('job_display_name', None)
self.trigger_time_string = kwargs.get('trigger_time_string', None)
class AetherAmlDataset(msrest.serialization.Model):
"""AetherAmlDataset.
:ivar registered_data_set_reference:
:vartype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'AetherRegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registered_data_set_reference:
:paramtype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherAmlDataset, self).__init__(**kwargs)
self.registered_data_set_reference = kwargs.get('registered_data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
class AetherAmlSparkCloudSetting(msrest.serialization.Model):
"""AetherAmlSparkCloudSetting.
:ivar entry:
:vartype entry: ~flow.models.AetherEntrySetting
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar inline_environment_definition_string:
:vartype inline_environment_definition_string: str
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar compute:
:vartype compute: str
:ivar resources:
:vartype resources: ~flow.models.AetherResourcesSetting
:ivar identity:
:vartype identity: ~flow.models.AetherIdentitySetting
"""
_attribute_map = {
'entry': {'key': 'entry', 'type': 'AetherEntrySetting'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
'conf': {'key': 'conf', 'type': '{str}'},
'compute': {'key': 'compute', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'AetherResourcesSetting'},
'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword entry:
:paramtype entry: ~flow.models.AetherEntrySetting
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword inline_environment_definition_string:
:paramtype inline_environment_definition_string: str
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword compute:
:paramtype compute: str
:keyword resources:
:paramtype resources: ~flow.models.AetherResourcesSetting
:keyword identity:
:paramtype identity: ~flow.models.AetherIdentitySetting
"""
super(AetherAmlSparkCloudSetting, self).__init__(**kwargs)
self.entry = kwargs.get('entry', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.inline_environment_definition_string = kwargs.get('inline_environment_definition_string', None)
self.conf = kwargs.get('conf', None)
self.compute = kwargs.get('compute', None)
self.resources = kwargs.get('resources', None)
self.identity = kwargs.get('identity', None)
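# Usage sketch (illustrative): a minimal Spark cloud setting. ``entry`` would
# take an AetherEntrySetting instance (defined later in this module) and is
# omitted here; the resource sizes below are assumptions, not defaults.
#
#     spark_setting = AetherAmlSparkCloudSetting(
#         files=["data/input.csv"],
#         py_files=["helpers.zip"],
#         driver_memory="4g",
#         driver_cores=2,
#         executor_memory="4g",
#         executor_cores=2,
#         number_executors=4,
#         conf={"spark.sql.shuffle.partitions": "200"},
#         compute="my-spark-compute",
#     )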
class AetherAPCloudConfiguration(msrest.serialization.Model):
"""AetherAPCloudConfiguration.
:ivar referenced_ap_module_guid:
:vartype referenced_ap_module_guid: str
:ivar user_alias:
:vartype user_alias: str
:ivar aether_module_type:
:vartype aether_module_type: str
"""
_attribute_map = {
'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword referenced_ap_module_guid:
:paramtype referenced_ap_module_guid: str
:keyword user_alias:
:paramtype user_alias: str
:keyword aether_module_type:
:paramtype aether_module_type: str
"""
super(AetherAPCloudConfiguration, self).__init__(**kwargs)
self.referenced_ap_module_guid = kwargs.get('referenced_ap_module_guid', None)
self.user_alias = kwargs.get('user_alias', None)
self.aether_module_type = kwargs.get('aether_module_type', None)
class AetherArgumentAssignment(msrest.serialization.Model):
"""AetherArgumentAssignment.
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:vartype value_type: str or ~flow.models.AetherArgumentValueType
:ivar value:
:vartype value: str
:ivar nested_argument_list:
:vartype nested_argument_list: list[~flow.models.AetherArgumentAssignment]
:ivar string_interpolation_argument_list:
:vartype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[AetherArgumentAssignment]'},
'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:paramtype value_type: str or ~flow.models.AetherArgumentValueType
:keyword value:
:paramtype value: str
:keyword nested_argument_list:
:paramtype nested_argument_list: list[~flow.models.AetherArgumentAssignment]
:keyword string_interpolation_argument_list:
:paramtype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherArgumentAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.value = kwargs.get('value', None)
self.nested_argument_list = kwargs.get('nested_argument_list', None)
self.string_interpolation_argument_list = kwargs.get('string_interpolation_argument_list', None)
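# Usage sketch (illustrative): argument assignments compose recursively. A
# string-interpolation list mixes literal text with a parameter reference;
# ``value_type`` takes the string values enumerated in the docstring above.
# The parameter name is a made-up example.
#
#     interpolated = AetherArgumentAssignment(
#         value_type="StringInterpolationList",
#         string_interpolation_argument_list=[
#             AetherArgumentAssignment(value_type="Literal", value="--input="),
#             AetherArgumentAssignment(value_type="Parameter", value="input_path"),
#         ],
#     )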
class AetherAssetDefinition(msrest.serialization.Model):
"""AetherAssetDefinition.
:ivar path:
:vartype path: str
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AetherAssetType
:ivar asset_id:
:vartype asset_id: str
:ivar initial_asset_id:
:vartype initial_asset_id: str
:ivar serialized_asset_id:
:vartype serialized_asset_id: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'initial_asset_id': {'key': 'initialAssetId', 'type': 'str'},
'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AetherAssetType
:keyword asset_id:
:paramtype asset_id: str
:keyword initial_asset_id:
:paramtype initial_asset_id: str
:keyword serialized_asset_id:
:paramtype serialized_asset_id: str
"""
super(AetherAssetDefinition, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.type = kwargs.get('type', None)
self.asset_id = kwargs.get('asset_id', None)
self.initial_asset_id = kwargs.get('initial_asset_id', None)
self.serialized_asset_id = kwargs.get('serialized_asset_id', None)
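# Illustrative usage sketch (not part of the generated client): populating an
# AetherAssetDefinition. The path and asset id are hypothetical; "UriFolder" is
# one of the documented AetherAssetType values.
def _example_asset_definition():
    """Return a sample asset definition (illustrative only)."""
    return AetherAssetDefinition(
        path="azureml://datastores/workspaceblobstore/paths/sample-data/",
        type="UriFolder",
        asset_id="azureml://locations/eastus/workspaces/00000/data/sample/versions/1",
    )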
class AetherAssetOutputSettings(msrest.serialization.Model):
"""AetherAssetOutputSettings.
:ivar path:
:vartype path: str
:ivar path_parameter_assignment:
:vartype path_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AetherAssetType
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'AetherParameterAssignment'},  # PascalCase key mirrors the service payload
'type': {'key': 'type', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword path_parameter_assignment:
:paramtype path_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AetherAssetType
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AetherAssetOutputSettings, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.path_parameter_assignment = kwargs.get('path_parameter_assignment', None)
self.type = kwargs.get('type', None)
self.options = kwargs.get('options', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
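# Illustrative usage sketch (not part of the generated client): an output
# setting that mounts a "UriFolder" output and registers it under a
# name/version. The path and the option key are hypothetical.
def _example_asset_output_settings():
    """Return sample asset output settings (illustrative only)."""
    return AetherAssetOutputSettings(
        path="outputs/predictions",
        type="UriFolder",
        data_store_mode="Mount",  # one of the documented AetherDataStoreMode values
        options={"registerAsset": "true"},  # hypothetical option key
        name="sample_predictions",
        version="1",
    )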
class AetherAutoFeaturizeConfiguration(msrest.serialization.Model):
"""AetherAutoFeaturizeConfiguration.
:ivar featurization_config:
:vartype featurization_config: ~flow.models.AetherFeaturizationSettings
"""
_attribute_map = {
'featurization_config': {'key': 'featurizationConfig', 'type': 'AetherFeaturizationSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword featurization_config:
:paramtype featurization_config: ~flow.models.AetherFeaturizationSettings
"""
super(AetherAutoFeaturizeConfiguration, self).__init__(**kwargs)
self.featurization_config = kwargs.get('featurization_config', None)
class AetherAutoMLComponentConfiguration(msrest.serialization.Model):
"""AetherAutoMLComponentConfiguration.
:ivar auto_train_config:
:vartype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
:ivar auto_featurize_config:
:vartype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
"""
_attribute_map = {
'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AetherAutoTrainConfiguration'},
'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AetherAutoFeaturizeConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_train_config:
:paramtype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
:keyword auto_featurize_config:
:paramtype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
"""
super(AetherAutoMLComponentConfiguration, self).__init__(**kwargs)
self.auto_train_config = kwargs.get('auto_train_config', None)
self.auto_featurize_config = kwargs.get('auto_featurize_config', None)
class AetherAutoTrainConfiguration(msrest.serialization.Model):
"""AetherAutoTrainConfiguration.
:ivar general_settings:
:vartype general_settings: ~flow.models.AetherGeneralSettings
:ivar limit_settings:
:vartype limit_settings: ~flow.models.AetherLimitSettings
:ivar data_settings:
:vartype data_settings: ~flow.models.AetherDataSettings
:ivar forecasting_settings:
:vartype forecasting_settings: ~flow.models.AetherForecastingSettings
:ivar training_settings:
:vartype training_settings: ~flow.models.AetherTrainingSettings
:ivar sweep_settings:
:vartype sweep_settings: ~flow.models.AetherSweepSettings
:ivar image_model_settings: Dictionary of :code:`<any>`.
:vartype image_model_settings: dict[str, any]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar compute_configuration:
:vartype compute_configuration: ~flow.models.AetherComputeConfiguration
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.AetherResourceConfiguration
:ivar environment_id:
:vartype environment_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
"""
_attribute_map = {
'general_settings': {'key': 'generalSettings', 'type': 'AetherGeneralSettings'},
'limit_settings': {'key': 'limitSettings', 'type': 'AetherLimitSettings'},
'data_settings': {'key': 'dataSettings', 'type': 'AetherDataSettings'},
'forecasting_settings': {'key': 'forecastingSettings', 'type': 'AetherForecastingSettings'},
'training_settings': {'key': 'trainingSettings', 'type': 'AetherTrainingSettings'},
'sweep_settings': {'key': 'sweepSettings', 'type': 'AetherSweepSettings'},
'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
'properties': {'key': 'properties', 'type': '{str}'},
'compute_configuration': {'key': 'computeConfiguration', 'type': 'AetherComputeConfiguration'},
'resource_configuration': {'key': 'resourceConfigurtion', 'type': 'AetherResourceConfiguration'},  # wire key keeps the service's original spelling
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword general_settings:
:paramtype general_settings: ~flow.models.AetherGeneralSettings
:keyword limit_settings:
:paramtype limit_settings: ~flow.models.AetherLimitSettings
:keyword data_settings:
:paramtype data_settings: ~flow.models.AetherDataSettings
:keyword forecasting_settings:
:paramtype forecasting_settings: ~flow.models.AetherForecastingSettings
:keyword training_settings:
:paramtype training_settings: ~flow.models.AetherTrainingSettings
:keyword sweep_settings:
:paramtype sweep_settings: ~flow.models.AetherSweepSettings
:keyword image_model_settings: Dictionary of :code:`<any>`.
:paramtype image_model_settings: dict[str, any]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword compute_configuration:
:paramtype compute_configuration: ~flow.models.AetherComputeConfiguration
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.AetherResourceConfiguration
:keyword environment_id:
:paramtype environment_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
"""
super(AetherAutoTrainConfiguration, self).__init__(**kwargs)
self.general_settings = kwargs.get('general_settings', None)
self.limit_settings = kwargs.get('limit_settings', None)
self.data_settings = kwargs.get('data_settings', None)
self.forecasting_settings = kwargs.get('forecasting_settings', None)
self.training_settings = kwargs.get('training_settings', None)
self.sweep_settings = kwargs.get('sweep_settings', None)
self.image_model_settings = kwargs.get('image_model_settings', None)
self.properties = kwargs.get('properties', None)
self.compute_configuration = kwargs.get('compute_configuration', None)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.environment_id = kwargs.get('environment_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
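# Illustrative usage sketch (not part of the generated client): composing an
# AutoML component configuration from the featurize/train halves defined
# above. The environment id and property key are hypothetical; the featurize
# settings are left empty because their fields live elsewhere in this module.
def _example_automl_component_configuration():
    """Return a minimal AetherAutoMLComponentConfiguration (illustrative only)."""
    train = AetherAutoTrainConfiguration(
        environment_id="azureml://environments/AutoML/versions/1",  # hypothetical id
        properties={"primary_metric": "accuracy"},  # hypothetical property
    )
    featurize = AetherAutoFeaturizeConfiguration()
    return AetherAutoMLComponentConfiguration(
        auto_train_config=train,
        auto_featurize_config=featurize,
    )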
class AetherAzureBlobReference(msrest.serialization.Model):
"""AetherAzureBlobReference.
:ivar container:
:vartype container: str
:ivar sas_token:
:vartype sas_token: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'container': {'key': 'container', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container:
:paramtype container: str
:keyword sas_token:
:paramtype sas_token: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureBlobReference, self).__init__(**kwargs)
self.container = kwargs.get('container', None)
self.sas_token = kwargs.get('sas_token', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
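# Illustrative usage sketch (not part of the generated client): a folder-typed
# blob reference. Container, account and path values are hypothetical.
def _example_azure_blob_reference():
    """Return a sample AetherAzureBlobReference (illustrative only)."""
    return AetherAzureBlobReference(
        container="sample-container",
        account="samplestorageaccount",
        relative_path="datasets/iris",
        path_type="Folder",  # one of "Unknown", "File", "Folder"
        aml_data_store_name="workspaceblobstore",
    )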
class AetherAzureDatabaseReference(msrest.serialization.Model):
"""AetherAzureDatabaseReference.
:ivar server_uri:
:vartype server_uri: str
:ivar database_name:
:vartype database_name: str
:ivar table_name:
:vartype table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'server_uri': {'key': 'serverUri', 'type': 'str'},
'database_name': {'key': 'databaseName', 'type': 'str'},
'table_name': {'key': 'tableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword server_uri:
:paramtype server_uri: str
:keyword database_name:
:paramtype database_name: str
:keyword table_name:
:paramtype table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDatabaseReference, self).__init__(**kwargs)
self.server_uri = kwargs.get('server_uri', None)
self.database_name = kwargs.get('database_name', None)
self.table_name = kwargs.get('table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
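# Illustrative usage sketch (not part of the generated client): a database
# reference that reads via an inline SQL query rather than a stored procedure.
# Server, database and datastore names are hypothetical.
def _example_azure_database_reference():
    """Return a sample AetherAzureDatabaseReference (illustrative only)."""
    return AetherAzureDatabaseReference(
        server_uri="sample-server.database.windows.net",
        database_name="sampledb",
        sql_query="SELECT TOP 100 * FROM dbo.observations",
        aml_data_store_name="workspacesqlstore",
    )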
class AetherAzureDataLakeGen2Reference(msrest.serialization.Model):
"""AetherAzureDataLakeGen2Reference.
:ivar file_system_name:
:vartype file_system_name: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file_system_name:
:paramtype file_system_name: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDataLakeGen2Reference, self).__init__(**kwargs)
self.file_system_name = kwargs.get('file_system_name', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherAzureDataLakeReference(msrest.serialization.Model):
"""AetherAzureDataLakeReference.
:ivar tenant:
:vartype tenant: str
:ivar subscription:
:vartype subscription: str
:ivar resource_group:
:vartype resource_group: str
:ivar data_lake_uri:
:vartype data_lake_uri: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'tenant': {'key': 'tenant', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'data_lake_uri': {'key': 'dataLakeUri', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tenant:
:paramtype tenant: str
:keyword subscription:
:paramtype subscription: str
:keyword resource_group:
:paramtype resource_group: str
:keyword data_lake_uri:
:paramtype data_lake_uri: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDataLakeReference, self).__init__(**kwargs)
self.tenant = kwargs.get('tenant', None)
self.subscription = kwargs.get('subscription', None)
self.resource_group = kwargs.get('resource_group', None)
self.data_lake_uri = kwargs.get('data_lake_uri', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherAzureFilesReference(msrest.serialization.Model):
"""AetherAzureFilesReference.
:ivar share:
:vartype share: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'share': {'key': 'share', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword share:
:paramtype share: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureFilesReference, self).__init__(**kwargs)
self.share = kwargs.get('share', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.path_type = kwargs.get('path_type', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherBatchAiComputeInfo(msrest.serialization.Model):
"""AetherBatchAiComputeInfo.
:ivar batch_ai_subscription_id:
:vartype batch_ai_subscription_id: str
:ivar batch_ai_resource_group:
:vartype batch_ai_resource_group: str
:ivar batch_ai_workspace_name:
:vartype batch_ai_workspace_name: str
:ivar cluster_name:
:vartype cluster_name: str
:ivar native_shared_directory:
:vartype native_shared_directory: str
"""
_attribute_map = {
'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
'cluster_name': {'key': 'clusterName', 'type': 'str'},
'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword batch_ai_subscription_id:
:paramtype batch_ai_subscription_id: str
:keyword batch_ai_resource_group:
:paramtype batch_ai_resource_group: str
:keyword batch_ai_workspace_name:
:paramtype batch_ai_workspace_name: str
:keyword cluster_name:
:paramtype cluster_name: str
:keyword native_shared_directory:
:paramtype native_shared_directory: str
"""
super(AetherBatchAiComputeInfo, self).__init__(**kwargs)
self.batch_ai_subscription_id = kwargs.get('batch_ai_subscription_id', None)
self.batch_ai_resource_group = kwargs.get('batch_ai_resource_group', None)
self.batch_ai_workspace_name = kwargs.get('batch_ai_workspace_name', None)
self.cluster_name = kwargs.get('cluster_name', None)
self.native_shared_directory = kwargs.get('native_shared_directory', None)
class AetherBuildArtifactInfo(msrest.serialization.Model):
"""AetherBuildArtifactInfo.
:ivar type: Possible values include: "CloudBuild", "Vso", "VsoGit".
:vartype type: str or ~flow.models.AetherBuildSourceType
:ivar cloud_build_drop_path_info:
:vartype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
:ivar vso_build_artifact_info:
:vartype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'cloud_build_drop_path_info': {'key': 'cloudBuildDropPathInfo', 'type': 'AetherCloudBuildDropPathInfo'},
'vso_build_artifact_info': {'key': 'vsoBuildArtifactInfo', 'type': 'AetherVsoBuildArtifactInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "CloudBuild", "Vso", "VsoGit".
:paramtype type: str or ~flow.models.AetherBuildSourceType
:keyword cloud_build_drop_path_info:
:paramtype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
:keyword vso_build_artifact_info:
:paramtype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
"""
super(AetherBuildArtifactInfo, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.cloud_build_drop_path_info = kwargs.get('cloud_build_drop_path_info', None)
self.vso_build_artifact_info = kwargs.get('vso_build_artifact_info', None)
class AetherCloudBuildDropPathInfo(msrest.serialization.Model):
"""AetherCloudBuildDropPathInfo.
:ivar build_info:
:vartype build_info: ~flow.models.AetherCloudBuildInfo
:ivar root:
:vartype root: str
"""
_attribute_map = {
'build_info': {'key': 'buildInfo', 'type': 'AetherCloudBuildInfo'},
'root': {'key': 'root', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword build_info:
:paramtype build_info: ~flow.models.AetherCloudBuildInfo
:keyword root:
:paramtype root: str
"""
super(AetherCloudBuildDropPathInfo, self).__init__(**kwargs)
self.build_info = kwargs.get('build_info', None)
self.root = kwargs.get('root', None)
class AetherCloudBuildInfo(msrest.serialization.Model):
"""AetherCloudBuildInfo.
:ivar queue_info:
:vartype queue_info: ~flow.models.AetherCloudBuildQueueInfo
:ivar build_id:
:vartype build_id: str
:ivar drop_url:
:vartype drop_url: str
"""
_attribute_map = {
'queue_info': {'key': 'queueInfo', 'type': 'AetherCloudBuildQueueInfo'},
'build_id': {'key': 'buildId', 'type': 'str'},
'drop_url': {'key': 'dropUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword queue_info:
:paramtype queue_info: ~flow.models.AetherCloudBuildQueueInfo
:keyword build_id:
:paramtype build_id: str
:keyword drop_url:
:paramtype drop_url: str
"""
super(AetherCloudBuildInfo, self).__init__(**kwargs)
self.queue_info = kwargs.get('queue_info', None)
self.build_id = kwargs.get('build_id', None)
self.drop_url = kwargs.get('drop_url', None)
class AetherCloudBuildQueueInfo(msrest.serialization.Model):
"""AetherCloudBuildQueueInfo.
:ivar build_queue:
:vartype build_queue: str
:ivar build_role:
:vartype build_role: str
"""
_attribute_map = {
'build_queue': {'key': 'buildQueue', 'type': 'str'},
'build_role': {'key': 'buildRole', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword build_queue:
:paramtype build_queue: str
:keyword build_role:
:paramtype build_role: str
"""
super(AetherCloudBuildQueueInfo, self).__init__(**kwargs)
self.build_queue = kwargs.get('build_queue', None)
self.build_role = kwargs.get('build_role', None)
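# Illustrative usage sketch (not part of the generated client): chaining the
# CloudBuild models defined above into a single AetherBuildArtifactInfo. Queue,
# build id and drop URL values are hypothetical.
def _example_build_artifact_info():
    """Return a sample CloudBuild-typed artifact info (illustrative only)."""
    queue = AetherCloudBuildQueueInfo(build_queue="sample-queue", build_role="sample-role")
    build = AetherCloudBuildInfo(
        queue_info=queue,
        build_id="12345",
        drop_url="https://example.invalid/drops/12345",
    )
    drop_path = AetherCloudBuildDropPathInfo(build_info=build, root="release/bin")
    # ``type`` selects which of the *_info fields the service reads.
    return AetherBuildArtifactInfo(type="CloudBuild", cloud_build_drop_path_info=drop_path)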
class AetherCloudPrioritySetting(msrest.serialization.Model):
"""AetherCloudPrioritySetting.
:ivar scope_priority:
:vartype scope_priority: ~flow.models.AetherPriorityConfiguration
:ivar aml_compute_priority:
:vartype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
:ivar itp_priority:
:vartype itp_priority: ~flow.models.AetherPriorityConfiguration
:ivar singularity_priority:
:vartype singularity_priority: ~flow.models.AetherPriorityConfiguration
"""
_attribute_map = {
'scope_priority': {'key': 'scopePriority', 'type': 'AetherPriorityConfiguration'},
'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'AetherPriorityConfiguration'},
'itp_priority': {'key': 'ItpPriority', 'type': 'AetherPriorityConfiguration'},
'singularity_priority': {'key': 'SingularityPriority', 'type': 'AetherPriorityConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword scope_priority:
:paramtype scope_priority: ~flow.models.AetherPriorityConfiguration
:keyword aml_compute_priority:
:paramtype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
:keyword itp_priority:
:paramtype itp_priority: ~flow.models.AetherPriorityConfiguration
:keyword singularity_priority:
:paramtype singularity_priority: ~flow.models.AetherPriorityConfiguration
"""
super(AetherCloudPrioritySetting, self).__init__(**kwargs)
self.scope_priority = kwargs.get('scope_priority', None)
self.aml_compute_priority = kwargs.get('aml_compute_priority', None)
self.itp_priority = kwargs.get('itp_priority', None)
self.singularity_priority = kwargs.get('singularity_priority', None)
class AetherCloudSettings(msrest.serialization.Model):
"""AetherCloudSettings.
:ivar linked_settings:
:vartype linked_settings: list[~flow.models.AetherParameterAssignment]
:ivar priority_config:
:vartype priority_config: ~flow.models.AetherPriorityConfiguration
:ivar hdi_run_config:
:vartype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
:ivar sub_graph_config:
:vartype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
:ivar auto_ml_component_config:
:vartype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
:ivar ap_cloud_config:
:vartype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
:ivar scope_cloud_config:
:vartype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
:ivar es_cloud_config:
:vartype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
:ivar data_transfer_cloud_config:
:vartype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
:ivar aml_spark_cloud_setting:
:vartype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
:ivar data_transfer_v2_cloud_setting:
:vartype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
"""
_attribute_map = {
'linked_settings': {'key': 'linkedSettings', 'type': '[AetherParameterAssignment]'},
'priority_config': {'key': 'priorityConfig', 'type': 'AetherPriorityConfiguration'},
'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'AetherHdiRunConfiguration'},
'sub_graph_config': {'key': 'subGraphConfig', 'type': 'AetherSubGraphConfiguration'},
'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AetherAutoMLComponentConfiguration'},
'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'AetherAPCloudConfiguration'},
'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'AetherScopeCloudConfiguration'},
'es_cloud_config': {'key': 'esCloudConfig', 'type': 'AetherEsCloudConfiguration'},
'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'AetherDataTransferCloudConfiguration'},
'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AetherAmlSparkCloudSetting'},
'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'AetherDataTransferV2CloudSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword linked_settings:
:paramtype linked_settings: list[~flow.models.AetherParameterAssignment]
:keyword priority_config:
:paramtype priority_config: ~flow.models.AetherPriorityConfiguration
:keyword hdi_run_config:
:paramtype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
:keyword sub_graph_config:
:paramtype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
:keyword auto_ml_component_config:
:paramtype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
:keyword ap_cloud_config:
:paramtype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
:keyword scope_cloud_config:
:paramtype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
:keyword es_cloud_config:
:paramtype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
:keyword data_transfer_cloud_config:
:paramtype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
:keyword aml_spark_cloud_setting:
:paramtype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
:keyword data_transfer_v2_cloud_setting:
:paramtype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
"""
super(AetherCloudSettings, self).__init__(**kwargs)
self.linked_settings = kwargs.get('linked_settings', None)
self.priority_config = kwargs.get('priority_config', None)
self.hdi_run_config = kwargs.get('hdi_run_config', None)
self.sub_graph_config = kwargs.get('sub_graph_config', None)
self.auto_ml_component_config = kwargs.get('auto_ml_component_config', None)
self.ap_cloud_config = kwargs.get('ap_cloud_config', None)
self.scope_cloud_config = kwargs.get('scope_cloud_config', None)
self.es_cloud_config = kwargs.get('es_cloud_config', None)
self.data_transfer_cloud_config = kwargs.get('data_transfer_cloud_config', None)
self.aml_spark_cloud_setting = kwargs.get('aml_spark_cloud_setting', None)
self.data_transfer_v2_cloud_setting = kwargs.get('data_transfer_v2_cloud_setting', None)
class AetherColumnTransformer(msrest.serialization.Model):
"""AetherColumnTransformer.
:ivar fields:
:vartype fields: list[str]
:ivar parameters: Anything.
:vartype parameters: any
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '[str]'},
'parameters': {'key': 'parameters', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword fields:
:paramtype fields: list[str]
:keyword parameters: Anything.
:paramtype parameters: any
"""
super(AetherColumnTransformer, self).__init__(**kwargs)
self.fields = kwargs.get('fields', None)
self.parameters = kwargs.get('parameters', None)
class AetherComputeConfiguration(msrest.serialization.Model):
"""AetherComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar is_preemptable:
:vartype is_preemptable: bool
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword is_preemptable:
:paramtype is_preemptable: bool
"""
super(AetherComputeConfiguration, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.instance_count = kwargs.get('instance_count', None)
self.is_local = kwargs.get('is_local', None)
self.location = kwargs.get('location', None)
self.is_clusterless = kwargs.get('is_clusterless', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.is_preemptable = kwargs.get('is_preemptable', None)
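# Illustrative usage sketch (not part of the generated client): a two-node
# remote compute configuration. Target and instance type names are
# hypothetical.
def _example_compute_configuration():
    """Return a sample AetherComputeConfiguration (illustrative only)."""
    return AetherComputeConfiguration(
        target="sample-cluster",
        instance_count=2,
        is_local=False,
        instance_type="STANDARD_DS3_V2",
    )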
class AetherComputeSetting(msrest.serialization.Model):
"""AetherComputeSetting.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:vartype compute_type: str or ~flow.models.AetherComputeType
:ivar batch_ai_compute_info:
:vartype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
:ivar remote_docker_compute_info:
:vartype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
:ivar hdi_cluster_compute_info:
:vartype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
:ivar mlc_compute_info:
:vartype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
:ivar databricks_compute_info:
:vartype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'AetherBatchAiComputeInfo'},
'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'AetherRemoteDockerComputeInfo'},
'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'AetherHdiClusterComputeInfo'},
'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'AetherMlcComputeInfo'},
'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'AetherDatabricksComputeInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:paramtype compute_type: str or ~flow.models.AetherComputeType
:keyword batch_ai_compute_info:
:paramtype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
:keyword remote_docker_compute_info:
:paramtype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
:keyword hdi_cluster_compute_info:
:paramtype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
:keyword mlc_compute_info:
:paramtype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
:keyword databricks_compute_info:
:paramtype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
"""
super(AetherComputeSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_type = kwargs.get('compute_type', None)
self.batch_ai_compute_info = kwargs.get('batch_ai_compute_info', None)
self.remote_docker_compute_info = kwargs.get('remote_docker_compute_info', None)
self.hdi_cluster_compute_info = kwargs.get('hdi_cluster_compute_info', None)
self.mlc_compute_info = kwargs.get('mlc_compute_info', None)
self.databricks_compute_info = kwargs.get('databricks_compute_info', None)
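# Illustrative usage sketch (not part of the generated client): a "BatchAi"
# compute setting carrying the matching info object defined above. All ids and
# names are hypothetical.
def _example_compute_setting():
    """Return a sample AetherComputeSetting (illustrative only)."""
    batch_ai = AetherBatchAiComputeInfo(
        batch_ai_subscription_id="00000000-0000-0000-0000-000000000000",
        batch_ai_resource_group="sample-rg",
        batch_ai_workspace_name="sample-ws",
        cluster_name="gpu-cluster",
    )
    # compute_type selects which *_compute_info field the service reads.
    return AetherComputeSetting(
        name="gpu-cluster",
        compute_type="BatchAi",
        batch_ai_compute_info=batch_ai,
    )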
class AetherControlInput(msrest.serialization.Model):
"""AetherControlInput.
:ivar name:
:vartype name: str
:ivar default_value: Possible values include: "None", "False", "True", "Skipped".
:vartype default_value: str or ~flow.models.AetherControlInputValue
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword default_value: Possible values include: "None", "False", "True", "Skipped".
:paramtype default_value: str or ~flow.models.AetherControlInputValue
"""
super(AetherControlInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.default_value = kwargs.get('default_value', None)
class AetherControlOutput(msrest.serialization.Model):
"""AetherControlOutput.
:ivar name:
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
"""
super(AetherControlOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class AetherCopyDataTask(msrest.serialization.Model):
"""AetherCopyDataTask.
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
_attribute_map = {
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
super(AetherCopyDataTask, self).__init__(**kwargs)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
class AetherCosmosReference(msrest.serialization.Model):
"""AetherCosmosReference.
:ivar cluster:
:vartype cluster: str
:ivar vc:
:vartype vc: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'str'},
'vc': {'key': 'vc', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cluster:
:paramtype cluster: str
:keyword vc:
:paramtype vc: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherCosmosReference, self).__init__(**kwargs)
self.cluster = kwargs.get('cluster', None)
self.vc = kwargs.get('vc', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherCreatedBy(msrest.serialization.Model):
"""AetherCreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
:ivar puid:
:vartype puid: str
:ivar iss:
:vartype iss: str
:ivar idp:
:vartype idp: str
:ivar altsec_id:
:vartype altsec_id: str
:ivar source_ip:
:vartype source_ip: str
:ivar skip_registry_private_link_check:
:vartype skip_registry_private_link_check: bool
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'puid': {'key': 'puid', 'type': 'str'},
'iss': {'key': 'iss', 'type': 'str'},
'idp': {'key': 'idp', 'type': 'str'},
'altsec_id': {'key': 'altsecId', 'type': 'str'},
'source_ip': {'key': 'sourceIp', 'type': 'str'},
'skip_registry_private_link_check': {'key': 'skipRegistryPrivateLinkCheck', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
:keyword puid:
:paramtype puid: str
:keyword iss:
:paramtype iss: str
:keyword idp:
:paramtype idp: str
:keyword altsec_id:
:paramtype altsec_id: str
:keyword source_ip:
:paramtype source_ip: str
:keyword skip_registry_private_link_check:
:paramtype skip_registry_private_link_check: bool
"""
super(AetherCreatedBy, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
self.puid = kwargs.get('puid', None)
self.iss = kwargs.get('iss', None)
self.idp = kwargs.get('idp', None)
self.altsec_id = kwargs.get('altsec_id', None)
self.source_ip = kwargs.get('source_ip', None)
self.skip_registry_private_link_check = kwargs.get('skip_registry_private_link_check', None)
class AetherCustomReference(msrest.serialization.Model):
"""AetherCustomReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherCustomReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherDatabaseSink(msrest.serialization.Model):
"""AetherDatabaseSink.
:ivar connection:
:vartype connection: str
:ivar table:
:vartype table: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'table': {'key': 'table', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword table:
:paramtype table: str
"""
super(AetherDatabaseSink, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.table = kwargs.get('table', None)
class AetherDatabaseSource(msrest.serialization.Model):
"""AetherDatabaseSource.
:ivar connection:
:vartype connection: str
:ivar query:
:vartype query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'query': {'key': 'query', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword query:
:paramtype query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
"""
super(AetherDatabaseSource, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.query = kwargs.get('query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
class AetherDatabricksComputeInfo(msrest.serialization.Model):
"""AetherDatabricksComputeInfo.
:ivar existing_cluster_id:
:vartype existing_cluster_id: str
"""
_attribute_map = {
'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword existing_cluster_id:
:paramtype existing_cluster_id: str
"""
super(AetherDatabricksComputeInfo, self).__init__(**kwargs)
self.existing_cluster_id = kwargs.get('existing_cluster_id', None)
class AetherDataLocation(msrest.serialization.Model):
"""AetherDataLocation.
:ivar storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:vartype storage_type: str or ~flow.models.AetherDataLocationStorageType
:ivar storage_id:
:vartype storage_id: str
:ivar uri:
:vartype uri: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference:
:vartype data_reference: ~flow.models.AetherDataReference
:ivar aml_dataset:
:vartype aml_dataset: ~flow.models.AetherAmlDataset
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AetherAssetDefinition
:ivar is_compliant:
:vartype is_compliant: bool
:ivar reuse_calculation_fields:
:vartype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields
"""
_attribute_map = {
'storage_type': {'key': 'storageType', 'type': 'str'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference': {'key': 'dataReference', 'type': 'AetherDataReference'},
'aml_dataset': {'key': 'amlDataset', 'type': 'AetherAmlDataset'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'},
'is_compliant': {'key': 'isCompliant', 'type': 'bool'},
'reuse_calculation_fields': {'key': 'reuseCalculationFields', 'type': 'AetherDataLocationReuseCalculationFields'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:paramtype storage_type: str or ~flow.models.AetherDataLocationStorageType
:keyword storage_id:
:paramtype storage_id: str
:keyword uri:
:paramtype uri: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference:
:paramtype data_reference: ~flow.models.AetherDataReference
:keyword aml_dataset:
:paramtype aml_dataset: ~flow.models.AetherAmlDataset
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AetherAssetDefinition
:keyword is_compliant:
:paramtype is_compliant: bool
:keyword reuse_calculation_fields:
:paramtype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields
"""
super(AetherDataLocation, self).__init__(**kwargs)
self.storage_type = kwargs.get('storage_type', None)
self.storage_id = kwargs.get('storage_id', None)
self.uri = kwargs.get('uri', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_reference = kwargs.get('data_reference', None)
self.aml_dataset = kwargs.get('aml_dataset', None)
self.asset_definition = kwargs.get('asset_definition', None)
self.is_compliant = kwargs.get('is_compliant', None)
self.reuse_calculation_fields = kwargs.get('reuse_calculation_fields', None)
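# Illustrative usage sketch (not part of the generated client): an asset-backed
# data location. The asset path is hypothetical.
def _example_data_location():
    """Return a sample AetherDataLocation (illustrative only)."""
    asset = AetherAssetDefinition(
        path="azureml://datastores/workspaceblobstore/paths/sample-data/",
        type="UriFolder",
    )
    return AetherDataLocation(
        storage_type="Asset",  # one of the documented AetherDataLocationStorageType values
        asset_definition=asset,
    )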
class AetherDataLocationReuseCalculationFields(msrest.serialization.Model):
"""AetherDataLocationReuseCalculationFields.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar data_experiment_id:
:vartype data_experiment_id: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'data_experiment_id': {'key': 'dataExperimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword data_experiment_id:
:paramtype data_experiment_id: str
"""
super(AetherDataLocationReuseCalculationFields, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.data_experiment_id = kwargs.get('data_experiment_id', None)
class AetherDataPath(msrest.serialization.Model):
"""AetherDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar sql_data_path:
:vartype sql_data_path: ~flow.models.AetherSqlDataPath
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'sql_data_path': {'key': 'sqlDataPath', 'type': 'AetherSqlDataPath'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword sql_data_path:
:paramtype sql_data_path: ~flow.models.AetherSqlDataPath
"""
super(AetherDataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.sql_data_path = kwargs.get('sql_data_path', None)
class AetherDataReference(msrest.serialization.Model):
"""AetherDataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2",
"DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
:vartype type: str or ~flow.models.AetherDataReferenceType
:ivar azure_blob_reference:
:vartype azure_blob_reference: ~flow.models.AetherAzureBlobReference
:ivar azure_data_lake_reference:
:vartype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference
:ivar azure_files_reference:
:vartype azure_files_reference: ~flow.models.AetherAzureFilesReference
:ivar cosmos_reference:
:vartype cosmos_reference: ~flow.models.AetherCosmosReference
:ivar philly_hdfs_reference:
:vartype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference
:ivar azure_sql_database_reference:
:vartype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar azure_postgres_database_reference:
:vartype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar azure_data_lake_gen2_reference:
:vartype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference
:ivar dbfs_reference:
:vartype dbfs_reference: ~flow.models.AetherDBFSReference
:ivar azure_my_sql_database_reference:
:vartype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar custom_reference:
:vartype custom_reference: ~flow.models.AetherCustomReference
:ivar hdfs_reference:
:vartype hdfs_reference: ~flow.models.AetherHdfsReference
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AetherAzureBlobReference'},
'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AetherAzureDataLakeReference'},
'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AetherAzureFilesReference'},
'cosmos_reference': {'key': 'cosmosReference', 'type': 'AetherCosmosReference'},
'philly_hdfs_reference': {'key': 'phillyHdfsReference', 'type': 'AetherPhillyHdfsReference'},
'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AetherAzureDataLakeGen2Reference'},
'dbfs_reference': {'key': 'dbfsReference', 'type': 'AetherDBFSReference'},
'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'custom_reference': {'key': 'customReference', 'type': 'AetherCustomReference'},
'hdfs_reference': {'key': 'hdfsReference', 'type': 'AetherHdfsReference'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2",
"DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype type: str or ~flow.models.AetherDataReferenceType
:keyword azure_blob_reference:
:paramtype azure_blob_reference: ~flow.models.AetherAzureBlobReference
:keyword azure_data_lake_reference:
:paramtype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference
:keyword azure_files_reference:
:paramtype azure_files_reference: ~flow.models.AetherAzureFilesReference
:keyword cosmos_reference:
:paramtype cosmos_reference: ~flow.models.AetherCosmosReference
:keyword philly_hdfs_reference:
:paramtype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference
:keyword azure_sql_database_reference:
:paramtype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword azure_postgres_database_reference:
:paramtype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword azure_data_lake_gen2_reference:
:paramtype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference
:keyword dbfs_reference:
:paramtype dbfs_reference: ~flow.models.AetherDBFSReference
:keyword azure_my_sql_database_reference:
:paramtype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword custom_reference:
:paramtype custom_reference: ~flow.models.AetherCustomReference
:keyword hdfs_reference:
:paramtype hdfs_reference: ~flow.models.AetherHdfsReference
"""
super(AetherDataReference, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.azure_blob_reference = kwargs.get('azure_blob_reference', None)
self.azure_data_lake_reference = kwargs.get('azure_data_lake_reference', None)
self.azure_files_reference = kwargs.get('azure_files_reference', None)
self.cosmos_reference = kwargs.get('cosmos_reference', None)
self.philly_hdfs_reference = kwargs.get('philly_hdfs_reference', None)
self.azure_sql_database_reference = kwargs.get('azure_sql_database_reference', None)
self.azure_postgres_database_reference = kwargs.get('azure_postgres_database_reference', None)
self.azure_data_lake_gen2_reference = kwargs.get('azure_data_lake_gen2_reference', None)
self.dbfs_reference = kwargs.get('dbfs_reference', None)
self.azure_my_sql_database_reference = kwargs.get('azure_my_sql_database_reference', None)
self.custom_reference = kwargs.get('custom_reference', None)
self.hdfs_reference = kwargs.get('hdfs_reference', None)
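# Illustrative usage sketch (not part of the generated client):
# AetherDataReference acts like a tagged union; ``type`` names which of the
# reference fields is populated. Values below are hypothetical.
def _example_data_reference():
    """Return a sample blob-typed AetherDataReference (illustrative only)."""
    blob = AetherAzureBlobReference(
        container="sample-container",
        account="samplestorageaccount",
        relative_path="datasets/iris",
        aml_data_store_name="workspaceblobstore",
    )
    return AetherDataReference(type="AzureBlob", azure_blob_reference=blob)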
class AetherDataSetDefinition(msrest.serialization.Model):
"""AetherDataSetDefinition.
:ivar data_type_short_name:
:vartype data_type_short_name: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar value:
:vartype value: ~flow.models.AetherDataSetDefinitionValue
"""
_attribute_map = {
'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'value': {'key': 'value', 'type': 'AetherDataSetDefinitionValue'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_type_short_name:
:paramtype data_type_short_name: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword value:
:paramtype value: ~flow.models.AetherDataSetDefinitionValue
"""
super(AetherDataSetDefinition, self).__init__(**kwargs)
self.data_type_short_name = kwargs.get('data_type_short_name', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.value = kwargs.get('value', None)
class AetherDataSetDefinitionValue(msrest.serialization.Model):
"""AetherDataSetDefinitionValue.
:ivar literal_value:
:vartype literal_value: ~flow.models.AetherDataPath
:ivar data_set_reference:
:vartype data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AetherAssetDefinition
"""
_attribute_map = {
'literal_value': {'key': 'literalValue', 'type': 'AetherDataPath'},
'data_set_reference': {'key': 'dataSetReference', 'type': 'AetherRegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword literal_value:
:paramtype literal_value: ~flow.models.AetherDataPath
:keyword data_set_reference:
:paramtype data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AetherAssetDefinition
"""
super(AetherDataSetDefinitionValue, self).__init__(**kwargs)
self.literal_value = kwargs.get('literal_value', None)
self.data_set_reference = kwargs.get('data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.asset_definition = kwargs.get('asset_definition', None)
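# Illustrative usage sketch (not part of the generated client): a literal
# dataset definition value expressed as an AetherDataPath. Datastore and path
# names are hypothetical.
def _example_data_set_definition_value():
    """Return a sample AetherDataSetDefinitionValue (illustrative only)."""
    literal = AetherDataPath(
        data_store_name="workspaceblobstore",
        relative_path="datasets/iris",
    )
    return AetherDataSetDefinitionValue(literal_value=literal)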
class AetherDatasetOutput(msrest.serialization.Model):
"""AetherDatasetOutput.
:ivar dataset_type: Possible values include: "File", "Tabular".
:vartype dataset_type: str or ~flow.models.AetherDatasetType
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.AetherDatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
"""
_attribute_map = {
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_type: Possible values include: "File", "Tabular".
:paramtype dataset_type: str or ~flow.models.AetherDatasetType
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
"""
super(AetherDatasetOutput, self).__init__(**kwargs)
self.dataset_type = kwargs.get('dataset_type', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
class AetherDatasetOutputOptions(msrest.serialization.Model):
"""AetherDatasetOutputOptions.
:ivar source_globs:
:vartype source_globs: ~flow.models.AetherGlobsOptions
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_datastore_parameter_assignment:
:vartype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'source_globs': {'key': 'sourceGlobs', 'type': 'AetherGlobsOptions'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_globs:
:paramtype source_globs: ~flow.models.AetherGlobsOptions
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_datastore_parameter_assignment:
:paramtype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
"""
super(AetherDatasetOutputOptions, self).__init__(**kwargs)
self.source_globs = kwargs.get('source_globs', None)
self.path_on_datastore = kwargs.get('path_on_datastore', None)
self.path_on_datastore_parameter_assignment = kwargs.get('path_on_datastore_parameter_assignment', None)
class AetherDatasetRegistration(msrest.serialization.Model):
"""AetherDatasetRegistration.
:ivar name:
:vartype name: str
:ivar create_new_version:
:vartype create_new_version: bool
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword create_new_version:
:paramtype create_new_version: bool
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherDatasetRegistration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.create_new_version = kwargs.get('create_new_version', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
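# Illustrative sketch: composing the three dataset-output models above into one value.
# The dataset name and tags are hypothetical; 'File' is one of the documented
# dataset_type values.
#
#     dataset_output = AetherDatasetOutput(
#         dataset_type='File',
#         dataset_registration=AetherDatasetRegistration(
#             name='my-dataset',
#             create_new_version=True,
#             tags={'stage': 'dev'},
#         ),
#         dataset_output_options=output_options,  # from the sketch above
#     )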
class AetherDataSettings(msrest.serialization.Model):
"""AetherDataSettings.
:ivar target_column_name:
:vartype target_column_name: str
:ivar weight_column_name:
:vartype weight_column_name: str
:ivar positive_label:
:vartype positive_label: str
:ivar validation_data:
:vartype validation_data: ~flow.models.AetherValidationDataSettings
:ivar test_data:
:vartype test_data: ~flow.models.AetherTestDataSettings
"""
_attribute_map = {
'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
'positive_label': {'key': 'positiveLabel', 'type': 'str'},
'validation_data': {'key': 'validationData', 'type': 'AetherValidationDataSettings'},
'test_data': {'key': 'testData', 'type': 'AetherTestDataSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target_column_name:
:paramtype target_column_name: str
:keyword weight_column_name:
:paramtype weight_column_name: str
:keyword positive_label:
:paramtype positive_label: str
:keyword validation_data:
:paramtype validation_data: ~flow.models.AetherValidationDataSettings
:keyword test_data:
:paramtype test_data: ~flow.models.AetherTestDataSettings
"""
super(AetherDataSettings, self).__init__(**kwargs)
self.target_column_name = kwargs.get('target_column_name', None)
self.weight_column_name = kwargs.get('weight_column_name', None)
self.positive_label = kwargs.get('positive_label', None)
self.validation_data = kwargs.get('validation_data', None)
self.test_data = kwargs.get('test_data', None)
class AetherDatastoreSetting(msrest.serialization.Model):
"""AetherDatastoreSetting.
:ivar data_store_name:
:vartype data_store_name: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
"""
super(AetherDatastoreSetting, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
class AetherDataTransferCloudConfiguration(msrest.serialization.Model):
"""AetherDataTransferCloudConfiguration.
:ivar allow_overwrite:
:vartype allow_overwrite: bool
"""
_attribute_map = {
'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
"""
super(AetherDataTransferCloudConfiguration, self).__init__(**kwargs)
self.allow_overwrite = kwargs.get('allow_overwrite', None)
class AetherDataTransferSink(msrest.serialization.Model):
"""AetherDataTransferSink.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.AetherDataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.AetherFileSystem
:ivar database_sink:
:vartype database_sink: ~flow.models.AetherDatabaseSink
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
'database_sink': {'key': 'databaseSink', 'type': 'AetherDatabaseSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.AetherDataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.AetherFileSystem
:keyword database_sink:
:paramtype database_sink: ~flow.models.AetherDatabaseSink
"""
super(AetherDataTransferSink, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_sink = kwargs.get('database_sink', None)
class AetherDataTransferSource(msrest.serialization.Model):
"""AetherDataTransferSource.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.AetherDataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.AetherFileSystem
:ivar database_source:
:vartype database_source: ~flow.models.AetherDatabaseSource
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
'database_source': {'key': 'databaseSource', 'type': 'AetherDatabaseSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.AetherDataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.AetherFileSystem
:keyword database_source:
:paramtype database_source: ~flow.models.AetherDatabaseSource
"""
super(AetherDataTransferSource, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_source = kwargs.get('database_source', None)
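# Illustrative sketch: AetherDataTransferSource/Sink behave like tagged unions -- the
# ``type`` field selects which payload field is populated. AetherFileSystem is defined
# later in this module; the connection name and paths below are assumptions.
#
#     source = AetherDataTransferSource(
#         type='FileSystem',
#         file_system=AetherFileSystem(connection='my_connection', path='/data/in'),
#     )
#     sink = AetherDataTransferSink(
#         type='FileSystem',
#         file_system=AetherFileSystem(connection='my_connection', path='/data/out'),
#     )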
class AetherDataTransferV2CloudSetting(msrest.serialization.Model):
"""AetherDataTransferV2CloudSetting.
:ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:vartype task_type: str or ~flow.models.AetherDataTransferTaskType
:ivar compute_name:
:vartype compute_name: str
:ivar copy_data_task:
:vartype copy_data_task: ~flow.models.AetherCopyDataTask
:ivar import_data_task:
:vartype import_data_task: ~flow.models.AetherImportDataTask
:ivar export_data_task:
:vartype export_data_task: ~flow.models.AetherExportDataTask
:ivar data_transfer_sources: This is a dictionary.
:vartype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
:ivar data_transfer_sinks: This is a dictionary.
:vartype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
_attribute_map = {
'task_type': {'key': 'taskType', 'type': 'str'},
'compute_name': {'key': 'ComputeName', 'type': 'str'},
'copy_data_task': {'key': 'CopyDataTask', 'type': 'AetherCopyDataTask'},
'import_data_task': {'key': 'ImportDataTask', 'type': 'AetherImportDataTask'},
'export_data_task': {'key': 'ExportDataTask', 'type': 'AetherExportDataTask'},
'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{AetherDataTransferSource}'},
'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{AetherDataTransferSink}'},
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:paramtype task_type: str or ~flow.models.AetherDataTransferTaskType
:keyword compute_name:
:paramtype compute_name: str
:keyword copy_data_task:
:paramtype copy_data_task: ~flow.models.AetherCopyDataTask
:keyword import_data_task:
:paramtype import_data_task: ~flow.models.AetherImportDataTask
:keyword export_data_task:
:paramtype export_data_task: ~flow.models.AetherExportDataTask
:keyword data_transfer_sources: This is a dictionary.
:paramtype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
:keyword data_transfer_sinks: This is a dictionary.
:paramtype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
super(AetherDataTransferV2CloudSetting, self).__init__(**kwargs)
self.task_type = kwargs.get('task_type', None)
self.compute_name = kwargs.get('compute_name', None)
self.copy_data_task = kwargs.get('copy_data_task', None)
self.import_data_task = kwargs.get('import_data_task', None)
self.export_data_task = kwargs.get('export_data_task', None)
self.data_transfer_sources = kwargs.get('data_transfer_sources', None)
self.data_transfer_sinks = kwargs.get('data_transfer_sinks', None)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
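# Illustrative sketch: a copy-data configuration wiring the source and sink sketched
# above into the keyed dictionaries this model expects. The compute name and dictionary
# keys are assumptions; the enum values come from the docstring above.
#
#     transfer_setting = AetherDataTransferV2CloudSetting(
#         task_type='CopyData',
#         compute_name='data-transfer-cluster',
#         data_transfer_sources={'source': source},
#         data_transfer_sinks={'sink': sink},
#         data_copy_mode='FailIfConflict',
#     )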
class AetherDBFSReference(msrest.serialization.Model):
"""AetherDBFSReference.
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherDBFSReference, self).__init__(**kwargs)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AetherDockerSettingConfiguration(msrest.serialization.Model):
"""AetherDockerSettingConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar shm_size:
:vartype shm_size: str
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword shm_size:
:paramtype shm_size: str
:keyword arguments:
:paramtype arguments: list[str]
"""
super(AetherDockerSettingConfiguration, self).__init__(**kwargs)
self.use_docker = kwargs.get('use_docker', None)
self.shared_volumes = kwargs.get('shared_volumes', None)
self.shm_size = kwargs.get('shm_size', None)
self.arguments = kwargs.get('arguments', None)
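# Illustrative sketch: enabling Docker with shared volumes and a larger shared-memory
# segment. ``shm_size`` follows Docker's size-string syntax; the value is an assumption.
#
#     docker_config = AetherDockerSettingConfiguration(
#         use_docker=True,
#         shared_volumes=True,
#         shm_size='2g',
#     )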
class AetherDoWhileControlFlowInfo(msrest.serialization.Model):
"""AetherDoWhileControlFlowInfo.
:ivar output_port_name_to_input_port_names_mapping: Dictionary mapping each loop output
 port name to the input port names it feeds back into.
:vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:ivar condition_output_port_name:
:vartype condition_output_port_name: str
:ivar run_settings:
:vartype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
"""
_attribute_map = {
'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': 'AetherDoWhileControlFlowRunSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_port_name_to_input_port_names_mapping: Dictionary mapping each loop
 output port name to the input port names it feeds back into.
:paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:keyword condition_output_port_name:
:paramtype condition_output_port_name: str
:keyword run_settings:
:paramtype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
"""
super(AetherDoWhileControlFlowInfo, self).__init__(**kwargs)
self.output_port_name_to_input_port_names_mapping = kwargs.get('output_port_name_to_input_port_names_mapping', None)
self.condition_output_port_name = kwargs.get('condition_output_port_name', None)
self.run_settings = kwargs.get('run_settings', None)
class AetherDoWhileControlFlowRunSettings(msrest.serialization.Model):
"""AetherDoWhileControlFlowRunSettings.
:ivar max_loop_iteration_count:
:vartype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_loop_iteration_count:
:paramtype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
"""
super(AetherDoWhileControlFlowRunSettings, self).__init__(**kwargs)
self.max_loop_iteration_count = kwargs.get('max_loop_iteration_count', None)
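# Illustrative sketch: a do-while description that feeds an output port back into an
# input port and caps iterations. AetherParameterAssignment is defined elsewhere in
# this module; the literal-value constructor usage below is an assumption.
#
#     do_while_info = AetherDoWhileControlFlowInfo(
#         output_port_name_to_input_port_names_mapping={'result': ['input']},
#         condition_output_port_name='condition',
#         run_settings=AetherDoWhileControlFlowRunSettings(
#             max_loop_iteration_count=AetherParameterAssignment(value='10'),
#         ),
#     )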
class AetherEntityInterfaceDocumentation(msrest.serialization.Model):
"""AetherEntityInterfaceDocumentation.
:ivar inputs_documentation: Dictionary of :code:`<string>`.
:vartype inputs_documentation: dict[str, str]
:ivar outputs_documentation: Dictionary of :code:`<string>`.
:vartype outputs_documentation: dict[str, str]
:ivar parameters_documentation: Dictionary of :code:`<string>`.
:vartype parameters_documentation: dict[str, str]
"""
_attribute_map = {
'inputs_documentation': {'key': 'inputsDocumentation', 'type': '{str}'},
'outputs_documentation': {'key': 'outputsDocumentation', 'type': '{str}'},
'parameters_documentation': {'key': 'parametersDocumentation', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs_documentation: Dictionary of :code:`<string>`.
:paramtype inputs_documentation: dict[str, str]
:keyword outputs_documentation: Dictionary of :code:`<string>`.
:paramtype outputs_documentation: dict[str, str]
:keyword parameters_documentation: Dictionary of :code:`<string>`.
:paramtype parameters_documentation: dict[str, str]
"""
super(AetherEntityInterfaceDocumentation, self).__init__(**kwargs)
self.inputs_documentation = kwargs.get('inputs_documentation', None)
self.outputs_documentation = kwargs.get('outputs_documentation', None)
self.parameters_documentation = kwargs.get('parameters_documentation', None)
class AetherEntrySetting(msrest.serialization.Model):
"""AetherEntrySetting.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(AetherEntrySetting, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
class AetherEnvironmentConfiguration(msrest.serialization.Model):
"""AetherEnvironmentConfiguration.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar use_environment_definition:
:vartype use_environment_definition: bool
:ivar environment_definition_string:
:vartype environment_definition_string: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword use_environment_definition:
:paramtype use_environment_definition: bool
:keyword environment_definition_string:
:paramtype environment_definition_string: str
"""
super(AetherEnvironmentConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.use_environment_definition = kwargs.get('use_environment_definition', None)
self.environment_definition_string = kwargs.get('environment_definition_string', None)
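# Illustrative sketch: referencing a registered environment by name and version rather
# than supplying an inline definition string. The names below are hypothetical.
#
#     environment = AetherEnvironmentConfiguration(
#         name='AzureML-minimal',
#         version='1',
#         use_environment_definition=False,
#     )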
class AetherEsCloudConfiguration(msrest.serialization.Model):
"""AetherEsCloudConfiguration.
:ivar enable_output_to_file_based_on_data_type_id:
:vartype enable_output_to_file_based_on_data_type_id: bool
:ivar aml_compute_priority_internal:
:vartype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar itp_priority_internal:
:vartype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar singularity_priority_internal:
:vartype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar environment:
:vartype environment: ~flow.models.AetherEnvironmentConfiguration
:ivar hyper_drive_configuration:
:vartype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
:ivar k8_s_config:
:vartype k8_s_config: ~flow.models.AetherK8SConfiguration
:ivar resource_config:
:vartype resource_config: ~flow.models.AetherResourceConfiguration
:ivar torch_distributed_config:
:vartype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
:ivar target_selector_config:
:vartype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
:ivar docker_config:
:vartype docker_config: ~flow.models.AetherDockerSettingConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: int
:ivar identity:
:vartype identity: ~flow.models.AetherIdentitySetting
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar run_config:
:vartype run_config: str
"""
_attribute_map = {
'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
'aml_compute_priority_internal': {'key': 'amlComputePriorityInternal', 'type': 'AetherPriorityConfiguration'},
'itp_priority_internal': {'key': 'itpPriorityInternal', 'type': 'AetherPriorityConfiguration'},
'singularity_priority_internal': {'key': 'singularityPriorityInternal', 'type': 'AetherPriorityConfiguration'},
'environment': {'key': 'environment', 'type': 'AetherEnvironmentConfiguration'},
'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'AetherHyperDriveConfiguration'},
'k8_s_config': {'key': 'k8sConfig', 'type': 'AetherK8SConfiguration'},
'resource_config': {'key': 'resourceConfig', 'type': 'AetherResourceConfiguration'},
'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'AetherTorchDistributedConfiguration'},
'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'AetherTargetSelectorConfiguration'},
'docker_config': {'key': 'dockerConfig', 'type': 'AetherDockerSettingConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'run_config': {'key': 'runConfig', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword enable_output_to_file_based_on_data_type_id:
:paramtype enable_output_to_file_based_on_data_type_id: bool
:keyword aml_compute_priority_internal:
:paramtype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword itp_priority_internal:
:paramtype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword singularity_priority_internal:
:paramtype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword environment:
:paramtype environment: ~flow.models.AetherEnvironmentConfiguration
:keyword hyper_drive_configuration:
:paramtype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
:keyword k8_s_config:
:paramtype k8_s_config: ~flow.models.AetherK8SConfiguration
:keyword resource_config:
:paramtype resource_config: ~flow.models.AetherResourceConfiguration
:keyword torch_distributed_config:
:paramtype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
:keyword target_selector_config:
:paramtype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
:keyword docker_config:
:paramtype docker_config: ~flow.models.AetherDockerSettingConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: int
:keyword identity:
:paramtype identity: ~flow.models.AetherIdentitySetting
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword run_config:
:paramtype run_config: str
"""
super(AetherEsCloudConfiguration, self).__init__(**kwargs)
self.enable_output_to_file_based_on_data_type_id = kwargs.get('enable_output_to_file_based_on_data_type_id', None)
self.aml_compute_priority_internal = kwargs.get('aml_compute_priority_internal', None)
self.itp_priority_internal = kwargs.get('itp_priority_internal', None)
self.singularity_priority_internal = kwargs.get('singularity_priority_internal', None)
self.environment = kwargs.get('environment', None)
self.hyper_drive_configuration = kwargs.get('hyper_drive_configuration', None)
self.k8_s_config = kwargs.get('k8_s_config', None)
self.resource_config = kwargs.get('resource_config', None)
self.torch_distributed_config = kwargs.get('torch_distributed_config', None)
self.target_selector_config = kwargs.get('target_selector_config', None)
self.docker_config = kwargs.get('docker_config', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
self.identity = kwargs.get('identity', None)
self.application_endpoints = kwargs.get('application_endpoints', None)
self.run_config = kwargs.get('run_config', None)
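# Illustrative sketch: a minimal execution-service cloud configuration reusing the
# environment and Docker sketches above; the environment variable and timeout are
# assumptions.
#
#     es_cloud = AetherEsCloudConfiguration(
#         environment=environment,        # from the AetherEnvironmentConfiguration sketch
#         docker_config=docker_config,    # from the AetherDockerSettingConfiguration sketch
#         environment_variables={'MY_FLAG': '1'},
#         max_run_duration_seconds=3600,
#     )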
class AetherExportDataTask(msrest.serialization.Model):
"""AetherExportDataTask.
:ivar data_transfer_sink:
:vartype data_transfer_sink: ~flow.models.AetherDataTransferSink
"""
_attribute_map = {
'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'AetherDataTransferSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_sink:
:paramtype data_transfer_sink: ~flow.models.AetherDataTransferSink
"""
super(AetherExportDataTask, self).__init__(**kwargs)
self.data_transfer_sink = kwargs.get('data_transfer_sink', None)
class AetherFeaturizationSettings(msrest.serialization.Model):
"""AetherFeaturizationSettings.
:ivar mode: Possible values include: "Auto", "Custom", "Off".
:vartype mode: str or ~flow.models.AetherFeaturizationMode
:ivar blocked_transformers:
:vartype blocked_transformers: list[str]
:ivar column_purposes: Dictionary of :code:`<string>`.
:vartype column_purposes: dict[str, str]
:ivar drop_columns:
:vartype drop_columns: list[str]
:ivar transformer_params: Dictionary mapping transformer names to the list of column
 transformers to apply.
:vartype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
:ivar dataset_language:
:vartype dataset_language: str
:ivar enable_dnn_featurization:
:vartype enable_dnn_featurization: bool
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
'transformer_params': {'key': 'transformerParams', 'type': '{[AetherColumnTransformer]}'},
'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom", "Off".
:paramtype mode: str or ~flow.models.AetherFeaturizationMode
:keyword blocked_transformers:
:paramtype blocked_transformers: list[str]
:keyword column_purposes: Dictionary of :code:`<string>`.
:paramtype column_purposes: dict[str, str]
:keyword drop_columns:
:paramtype drop_columns: list[str]
:keyword transformer_params: Dictionary mapping transformer names to the list of column
 transformers to apply.
:paramtype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
:keyword dataset_language:
:paramtype dataset_language: str
:keyword enable_dnn_featurization:
:paramtype enable_dnn_featurization: bool
"""
super(AetherFeaturizationSettings, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.blocked_transformers = kwargs.get('blocked_transformers', None)
self.column_purposes = kwargs.get('column_purposes', None)
self.drop_columns = kwargs.get('drop_columns', None)
self.transformer_params = kwargs.get('transformer_params', None)
self.dataset_language = kwargs.get('dataset_language', None)
self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', None)
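# Illustrative sketch: custom featurization that drops a column, blocks a transformer,
# and pins a column purpose. Column, transformer, and purpose names are assumptions.
#
#     featurization = AetherFeaturizationSettings(
#         mode='Custom',
#         drop_columns=['row_id'],
#         blocked_transformers=['LabelEncoder'],
#         column_purposes={'comment': 'Text'},
#     )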
class AetherFileSystem(msrest.serialization.Model):
"""AetherFileSystem.
:ivar connection:
:vartype connection: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword path:
:paramtype path: str
"""
super(AetherFileSystem, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.path = kwargs.get('path', None)
class AetherForecastHorizon(msrest.serialization.Model):
"""AetherForecastHorizon.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherForecastHorizonMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherForecastHorizonMode
:keyword value:
:paramtype value: int
"""
super(AetherForecastHorizon, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
class AetherForecastingSettings(msrest.serialization.Model):
"""AetherForecastingSettings.
:ivar country_or_region_for_holidays:
:vartype country_or_region_for_holidays: str
:ivar time_column_name:
:vartype time_column_name: str
:ivar target_lags:
:vartype target_lags: ~flow.models.AetherTargetLags
:ivar target_rolling_window_size:
:vartype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
:ivar forecast_horizon:
:vartype forecast_horizon: ~flow.models.AetherForecastHorizon
:ivar time_series_id_column_names:
:vartype time_series_id_column_names: list[str]
:ivar frequency:
:vartype frequency: str
:ivar feature_lags:
:vartype feature_lags: str
:ivar seasonality:
:vartype seasonality: ~flow.models.AetherSeasonality
:ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:vartype short_series_handling_config: str or
~flow.models.AetherShortSeriesHandlingConfiguration
:ivar use_stl: Possible values include: "Season", "SeasonTrend".
:vartype use_stl: str or ~flow.models.AetherUseStl
:ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:vartype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
:ivar cv_step_size:
:vartype cv_step_size: int
:ivar features_unknown_at_forecast_time:
:vartype features_unknown_at_forecast_time: list[str]
"""
_attribute_map = {
'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
'target_lags': {'key': 'targetLags', 'type': 'AetherTargetLags'},
'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'AetherTargetRollingWindowSize'},
'forecast_horizon': {'key': 'forecastHorizon', 'type': 'AetherForecastHorizon'},
'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
'frequency': {'key': 'frequency', 'type': 'str'},
'feature_lags': {'key': 'featureLags', 'type': 'str'},
'seasonality': {'key': 'seasonality', 'type': 'AetherSeasonality'},
'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
'use_stl': {'key': 'useStl', 'type': 'str'},
'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword country_or_region_for_holidays:
:paramtype country_or_region_for_holidays: str
:keyword time_column_name:
:paramtype time_column_name: str
:keyword target_lags:
:paramtype target_lags: ~flow.models.AetherTargetLags
:keyword target_rolling_window_size:
:paramtype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
:keyword forecast_horizon:
:paramtype forecast_horizon: ~flow.models.AetherForecastHorizon
:keyword time_series_id_column_names:
:paramtype time_series_id_column_names: list[str]
:keyword frequency:
:paramtype frequency: str
:keyword feature_lags:
:paramtype feature_lags: str
:keyword seasonality:
:paramtype seasonality: ~flow.models.AetherSeasonality
:keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:paramtype short_series_handling_config: str or
~flow.models.AetherShortSeriesHandlingConfiguration
:keyword use_stl: Possible values include: "Season", "SeasonTrend".
:paramtype use_stl: str or ~flow.models.AetherUseStl
:keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:paramtype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
:keyword cv_step_size:
:paramtype cv_step_size: int
:keyword features_unknown_at_forecast_time:
:paramtype features_unknown_at_forecast_time: list[str]
"""
super(AetherForecastingSettings, self).__init__(**kwargs)
self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None)
self.time_column_name = kwargs.get('time_column_name', None)
self.target_lags = kwargs.get('target_lags', None)
self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None)
self.forecast_horizon = kwargs.get('forecast_horizon', None)
self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None)
self.frequency = kwargs.get('frequency', None)
self.feature_lags = kwargs.get('feature_lags', None)
self.seasonality = kwargs.get('seasonality', None)
self.short_series_handling_config = kwargs.get('short_series_handling_config', None)
self.use_stl = kwargs.get('use_stl', None)
self.target_aggregate_function = kwargs.get('target_aggregate_function', None)
self.cv_step_size = kwargs.get('cv_step_size', None)
self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None)
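# Illustrative sketch: a daily forecasting setup with a fixed three-step horizon,
# using the AetherForecastHorizon model defined above. The column name and frequency
# string are assumptions.
#
#     forecasting = AetherForecastingSettings(
#         time_column_name='date',
#         frequency='D',
#         forecast_horizon=AetherForecastHorizon(mode='Custom', value=3),
#         short_series_handling_config='Auto',
#     )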
class AetherGeneralSettings(msrest.serialization.Model):
"""AetherGeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:vartype primary_metric: str or ~flow.models.AetherPrimaryMetrics
:ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:vartype task_type: str or ~flow.models.AetherTaskType
:ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:vartype log_verbosity: str or ~flow.models.AetherLogVerbosity
"""
_attribute_map = {
'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
'task_type': {'key': 'taskType', 'type': 'str'},
'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:paramtype primary_metric: str or ~flow.models.AetherPrimaryMetrics
:keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:paramtype task_type: str or ~flow.models.AetherTaskType
:keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:paramtype log_verbosity: str or ~flow.models.AetherLogVerbosity
"""
super(AetherGeneralSettings, self).__init__(**kwargs)
self.primary_metric = kwargs.get('primary_metric', None)
self.task_type = kwargs.get('task_type', None)
self.log_verbosity = kwargs.get('log_verbosity', None)
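# Illustrative sketch: general AutoML settings pairing a task type with a matching
# primary metric; all three values come from the enums documented above.
#
#     general = AetherGeneralSettings(
#         task_type='Classification',
#         primary_metric='Accuracy',
#         log_verbosity='Info',
#     )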
class AetherGlobsOptions(msrest.serialization.Model):
"""AetherGlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(AetherGlobsOptions, self).__init__(**kwargs)
self.glob_patterns = kwargs.get('glob_patterns', None)
class AetherGraphControlNode(msrest.serialization.Model):
"""AetherGraphControlNode.
:ivar id:
:vartype id: str
:ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
value is None.
:vartype control_type: str
:ivar control_parameter:
:vartype control_parameter: ~flow.models.AetherParameterAssignment
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'control_type': {'key': 'controlType', 'type': 'str'},
'control_parameter': {'key': 'controlParameter', 'type': 'AetherParameterAssignment'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword control_type: The only acceptable values to pass in are None and "IfElse". The
default value is None.
:paramtype control_type: str
:keyword control_parameter:
:paramtype control_parameter: ~flow.models.AetherParameterAssignment
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphControlNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.control_type = kwargs.get('control_type', None)
self.control_parameter = kwargs.get('control_parameter', None)
self.run_attribution = kwargs.get('run_attribution', None)
class AetherGraphControlReferenceNode(msrest.serialization.Model):
"""AetherGraphControlReferenceNode.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar comment:
:vartype comment: str
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.AetherControlFlowType
:ivar reference_node_id:
:vartype reference_node_id: str
:ivar do_while_control_flow_info:
:vartype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
:ivar parallel_for_control_flow_info:
:vartype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'AetherDoWhileControlFlowInfo'},
'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'AetherParallelForControlFlowInfo'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword comment:
:paramtype comment: str
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.AetherControlFlowType
:keyword reference_node_id:
:paramtype reference_node_id: str
:keyword do_while_control_flow_info:
:paramtype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
:keyword parallel_for_control_flow_info:
:paramtype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphControlReferenceNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.comment = kwargs.get('comment', None)
self.control_flow_type = kwargs.get('control_flow_type', None)
self.reference_node_id = kwargs.get('reference_node_id', None)
self.do_while_control_flow_info = kwargs.get('do_while_control_flow_info', None)
self.parallel_for_control_flow_info = kwargs.get('parallel_for_control_flow_info', None)
self.run_attribution = kwargs.get('run_attribution', None)
class AetherGraphDatasetNode(msrest.serialization.Model):
"""AetherGraphDatasetNode.
:ivar id:
:vartype id: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar data_path_parameter_name:
:vartype data_path_parameter_name: str
:ivar data_set_definition:
:vartype data_set_definition: ~flow.models.AetherDataSetDefinition
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
'data_set_definition': {'key': 'dataSetDefinition', 'type': 'AetherDataSetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword data_path_parameter_name:
:paramtype data_path_parameter_name: str
:keyword data_set_definition:
:paramtype data_set_definition: ~flow.models.AetherDataSetDefinition
"""
super(AetherGraphDatasetNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.dataset_id = kwargs.get('dataset_id', None)
self.data_path_parameter_name = kwargs.get('data_path_parameter_name', None)
self.data_set_definition = kwargs.get('data_set_definition', None)
class AetherGraphEdge(msrest.serialization.Model):
"""AetherGraphEdge.
:ivar source_output_port:
:vartype source_output_port: ~flow.models.AetherPortInfo
:ivar destination_input_port:
:vartype destination_input_port: ~flow.models.AetherPortInfo
"""
_attribute_map = {
'source_output_port': {'key': 'sourceOutputPort', 'type': 'AetherPortInfo'},
'destination_input_port': {'key': 'destinationInputPort', 'type': 'AetherPortInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_output_port:
:paramtype source_output_port: ~flow.models.AetherPortInfo
:keyword destination_input_port:
:paramtype destination_input_port: ~flow.models.AetherPortInfo
"""
super(AetherGraphEdge, self).__init__(**kwargs)
self.source_output_port = kwargs.get('source_output_port', None)
self.destination_input_port = kwargs.get('destination_input_port', None)
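# Illustrative sketch: an edge connecting one node's output port to another node's
# input port. AetherPortInfo is defined elsewhere in this module; the node-id/port-name
# constructor usage shown here is an assumption.
#
#     edge = AetherGraphEdge(
#         source_output_port=AetherPortInfo(node_id='node-1', port_name='output'),
#         destination_input_port=AetherPortInfo(node_id='node-2', port_name='input'),
#     )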
class AetherGraphEntity(msrest.serialization.Model):
"""AetherGraphEntity.
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.AetherGraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.AetherGraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.AetherGraphEdge]
:ivar default_compute:
:vartype default_compute: ~flow.models.AetherComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.AetherDatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar etag:
:vartype etag: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.AetherEntityStatus
"""
_attribute_map = {
'module_nodes': {'key': 'moduleNodes', 'type': '[AetherGraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[AetherGraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[AetherGraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[AetherGraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[AetherGraphControlNode]'},
'edges': {'key': 'edges', 'type': '[AetherGraphEdge]'},
'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.AetherGraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.AetherGraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.AetherGraphEdge]
:keyword default_compute:
:paramtype default_compute: ~flow.models.AetherComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.AetherDatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword etag:
:paramtype etag: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.AetherEntityStatus
"""
super(AetherGraphEntity, self).__init__(**kwargs)
self.module_nodes = kwargs.get('module_nodes', None)
self.dataset_nodes = kwargs.get('dataset_nodes', None)
self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
self.control_nodes = kwargs.get('control_nodes', None)
self.edges = kwargs.get('edges', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
self.id = kwargs.get('id', None)
self.workspace_id = kwargs.get('workspace_id', None)
self.etag = kwargs.get('etag', None)
self.tags = kwargs.get('tags', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.entity_status = kwargs.get('entity_status', None)
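# Illustrative sketch: assembling a graph entity from the node, edge, and datastore
# models defined nearby (AetherGraphModuleNode follows below). The ids are hypothetical.
#
#     graph = AetherGraphEntity(
#         module_nodes=[AetherGraphModuleNode(id='node-1', module_id='module-guid')],
#         edges=[edge],  # from the AetherGraphEdge sketch above
#         default_datastore=AetherDatastoreSetting(data_store_name='workspaceblobstore'),
#     )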
class AetherGraphModuleNode(msrest.serialization.Model):
"""AetherGraphModuleNode.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar default_data_retention_hint:
:vartype default_data_retention_hint: int
:ivar compliance_cluster:
:vartype compliance_cluster: str
:ivar euclid_workspace_id:
:vartype euclid_workspace_id: str
:ivar attached_modules:
:vartype attached_modules: list[str]
:ivar acceptable_machine_clusters:
:vartype acceptable_machine_clusters: list[str]
:ivar custom_data_location_id:
:vartype custom_data_location_id: str
:ivar alert_timeout_duration:
:vartype alert_timeout_duration: str
:ivar runconfig:
:vartype runconfig: str
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.AetherOutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.AetherInputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.AetherControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.AetherExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'default_data_retention_hint': {'key': 'defaultDataRetentionHint', 'type': 'int'},
'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
'euclid_workspace_id': {'key': 'euclidWorkspaceId', 'type': 'str'},
'attached_modules': {'key': 'attachedModules', 'type': '[str]'},
'acceptable_machine_clusters': {'key': 'acceptableMachineClusters', 'type': '[str]'},
'custom_data_location_id': {'key': 'customDataLocationId', 'type': 'str'},
'alert_timeout_duration': {'key': 'alertTimeoutDuration', 'type': 'str'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword default_data_retention_hint:
:paramtype default_data_retention_hint: int
:keyword compliance_cluster:
:paramtype compliance_cluster: str
:keyword euclid_workspace_id:
:paramtype euclid_workspace_id: str
:keyword attached_modules:
:paramtype attached_modules: list[str]
:keyword acceptable_machine_clusters:
:paramtype acceptable_machine_clusters: list[str]
:keyword custom_data_location_id:
:paramtype custom_data_location_id: str
:keyword alert_timeout_duration:
:paramtype alert_timeout_duration: str
:keyword runconfig:
:paramtype runconfig: str
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.AetherInputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.AetherControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphModuleNode, self).__init__(**kwargs)
self.cloud_priority = kwargs.get('cloud_priority', None)
self.default_data_retention_hint = kwargs.get('default_data_retention_hint', None)
self.compliance_cluster = kwargs.get('compliance_cluster', None)
self.euclid_workspace_id = kwargs.get('euclid_workspace_id', None)
self.attached_modules = kwargs.get('attached_modules', None)
self.acceptable_machine_clusters = kwargs.get('acceptable_machine_clusters', None)
self.custom_data_location_id = kwargs.get('custom_data_location_id', None)
self.alert_timeout_duration = kwargs.get('alert_timeout_duration', None)
self.runconfig = kwargs.get('runconfig', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
class AetherGraphReferenceNode(msrest.serialization.Model):
"""AetherGraphReferenceNode.
:ivar graph_id:
:vartype graph_id: str
:ivar default_compute:
:vartype default_compute: ~flow.models.AetherComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.AetherDatastoreSetting
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.AetherOutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.AetherInputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.AetherControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.AetherExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword default_compute:
:paramtype default_compute: ~flow.models.AetherComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.AetherDatastoreSetting
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.AetherInputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.AetherControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphReferenceNode, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
class AetherHdfsReference(msrest.serialization.Model):
"""AetherHdfsReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherHdfsReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherHdiClusterComputeInfo(msrest.serialization.Model):
"""AetherHdiClusterComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(AetherHdiClusterComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)
class AetherHdiRunConfiguration(msrest.serialization.Model):
"""AetherHdiRunConfiguration.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar compute_name:
:vartype compute_name: str
:ivar queue:
:vartype queue: str
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar name:
:vartype name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'str'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'conf': {'key': 'conf', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword compute_name:
:paramtype compute_name: str
:keyword queue:
:paramtype queue: str
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword name:
:paramtype name: str
"""
super(AetherHdiRunConfiguration, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.compute_name = kwargs.get('compute_name', None)
self.queue = kwargs.get('queue', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.conf = kwargs.get('conf', None)
self.name = kwargs.get('name', None)
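
# Usage sketch (illustrative): a Spark-style HDI run configuration. All values
# below are hypothetical; the memory fields are plain strings, so the
# "4g"/"8g" convention is an assumption borrowed from Spark/Livy, not
# something this model enforces.
#
#     hdi_run = AetherHdiRunConfiguration(
#         file="wasbs://container@account/scripts/train.py",  # hypothetical
#         py_files=["helpers.zip"],
#         compute_name="my-hdi-cluster",
#         queue="default",
#         driver_memory="4g",
#         driver_cores=2,
#         executor_memory="8g",
#         executor_cores=4,
#         number_executors=8,
#         conf={"spark.yarn.maxAppAttempts": "1"},
#         name="train-step",
#     )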
class AetherHyperDriveConfiguration(msrest.serialization.Model):
"""AetherHyperDriveConfiguration.
:ivar hyper_drive_run_config:
:vartype hyper_drive_run_config: str
:ivar primary_metric_goal:
:vartype primary_metric_goal: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar arguments:
:vartype arguments: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hyper_drive_run_config:
:paramtype hyper_drive_run_config: str
:keyword primary_metric_goal:
:paramtype primary_metric_goal: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword arguments:
:paramtype arguments: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherHyperDriveConfiguration, self).__init__(**kwargs)
self.hyper_drive_run_config = kwargs.get('hyper_drive_run_config', None)
self.primary_metric_goal = kwargs.get('primary_metric_goal', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.arguments = kwargs.get('arguments', None)
class AetherIdentitySetting(msrest.serialization.Model):
"""AetherIdentitySetting.
:ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:vartype type: str or ~flow.models.AetherIdentityType
:ivar client_id:
:vartype client_id: str
:ivar object_id:
:vartype object_id: str
:ivar msi_resource_id:
:vartype msi_resource_id: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'object_id': {'key': 'objectId', 'type': 'str'},
'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:paramtype type: str or ~flow.models.AetherIdentityType
:keyword client_id:
:paramtype client_id: str
:keyword object_id:
:paramtype object_id: str
:keyword msi_resource_id:
:paramtype msi_resource_id: str
"""
super(AetherIdentitySetting, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.client_id = kwargs.get('client_id', None)
self.object_id = kwargs.get('object_id', None)
self.msi_resource_id = kwargs.get('msi_resource_id', None)
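
# Usage sketch (illustrative): the ``type`` field accepts either the plain
# string or the AetherIdentityType enum member; msrest serializes both the
# same way. The client id below is a placeholder GUID.
#
#     identity = AetherIdentitySetting(
#         type="Managed",  # or the AetherIdentityType enum value, if imported
#         client_id="00000000-0000-0000-0000-000000000000",
#     )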
class AetherImportDataTask(msrest.serialization.Model):
"""AetherImportDataTask.
:ivar data_transfer_source:
:vartype data_transfer_source: ~flow.models.AetherDataTransferSource
"""
_attribute_map = {
'data_transfer_source': {'key': 'DataTransferSource', 'type': 'AetherDataTransferSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_source:
:paramtype data_transfer_source: ~flow.models.AetherDataTransferSource
"""
super(AetherImportDataTask, self).__init__(**kwargs)
self.data_transfer_source = kwargs.get('data_transfer_source', None)
class AetherInputSetting(msrest.serialization.Model):
"""AetherInputSetting.
:ivar name:
:vartype name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherInputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.options = kwargs.get('options', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
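
# Usage sketch (illustrative): an input mounted onto the compute. The option
# key shown is hypothetical; ``options`` is an opaque dict[str, str] passed
# through as-is.
#
#     input_setting = AetherInputSetting(
#         name="training_data",
#         data_store_mode="Mount",  # one of the AetherDataStoreMode values
#         path_on_compute="/mnt/inputs/training_data",
#         options={"readPreference": "cache"},  # hypothetical option key
#     )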
class AetherInteractiveConfig(msrest.serialization.Model):
"""AetherInteractiveConfig.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(AetherInteractiveConfig, self).__init__(**kwargs)
self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
self.interactive_port = kwargs.get('interactive_port', None)
class AetherK8SConfiguration(msrest.serialization.Model):
"""AetherK8SConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.AetherResourceConfig
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.AetherPriorityConfig
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.AetherInteractiveConfig
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'AetherResourceConfig'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AetherPriorityConfig'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'AetherInteractiveConfig'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.AetherResourceConfig
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.AetherPriorityConfig
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.AetherInteractiveConfig
"""
super(AetherK8SConfiguration, self).__init__(**kwargs)
self.max_retry_count = kwargs.get('max_retry_count', None)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.priority_configuration = kwargs.get('priority_configuration', None)
self.interactive_configuration = kwargs.get('interactive_configuration', None)
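
# Usage sketch (illustrative): composing the nested configuration models. The
# referenced classes (AetherResourceConfig, AetherPriorityConfig,
# AetherInteractiveConfig) are defined elsewhere in this module; all values
# are hypothetical.
#
#     k8s_config = AetherK8SConfiguration(
#         max_retry_count=3,
#         resource_configuration=AetherResourceConfig(
#             gpu_count=1, cpu_count=4, memory_request_in_gb=16),
#         priority_configuration=AetherPriorityConfig(
#             job_priority=100, is_preemptible=False),
#         interactive_configuration=AetherInteractiveConfig(
#             is_ssh_enabled=True, ssh_public_key="ssh-rsa AAAA..."),  # placeholder key
#     )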
class AetherLegacyDataPath(msrest.serialization.Model):
"""AetherLegacyDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherLegacyDataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherLimitSettings(msrest.serialization.Model):
"""AetherLimitSettings.
:ivar max_trials:
:vartype max_trials: int
:ivar timeout:
:vartype timeout: str
:ivar trial_timeout:
:vartype trial_timeout: str
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
:ivar max_cores_per_trial:
:vartype max_cores_per_trial: int
:ivar exit_score:
:vartype exit_score: float
:ivar enable_early_termination:
:vartype enable_early_termination: bool
:ivar max_nodes:
:vartype max_nodes: int
"""
_attribute_map = {
'max_trials': {'key': 'maxTrials', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'str'},
'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
'exit_score': {'key': 'exitScore', 'type': 'float'},
'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
'max_nodes': {'key': 'maxNodes', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_trials:
:paramtype max_trials: int
:keyword timeout:
:paramtype timeout: str
:keyword trial_timeout:
:paramtype trial_timeout: str
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
:keyword max_cores_per_trial:
:paramtype max_cores_per_trial: int
:keyword exit_score:
:paramtype exit_score: float
:keyword enable_early_termination:
:paramtype enable_early_termination: bool
:keyword max_nodes:
:paramtype max_nodes: int
"""
super(AetherLimitSettings, self).__init__(**kwargs)
self.max_trials = kwargs.get('max_trials', None)
self.timeout = kwargs.get('timeout', None)
self.trial_timeout = kwargs.get('trial_timeout', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
self.max_cores_per_trial = kwargs.get('max_cores_per_trial', None)
self.exit_score = kwargs.get('exit_score', None)
self.enable_early_termination = kwargs.get('enable_early_termination', None)
self.max_nodes = kwargs.get('max_nodes', None)
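
# Usage sketch (illustrative): AutoML-style limit settings. ``timeout`` and
# ``trial_timeout`` are plain strings; the ISO 8601 duration format used
# below ("PT2H") is an assumption, not something this model enforces.
#
#     limits = AetherLimitSettings(
#         max_trials=20,
#         max_concurrent_trials=4,
#         timeout="PT2H",
#         trial_timeout="PT15M",
#         enable_early_termination=True,
#     )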
class AetherMlcComputeInfo(msrest.serialization.Model):
"""AetherMlcComputeInfo.
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
"""
_attribute_map = {
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
"""
super(AetherMlcComputeInfo, self).__init__(**kwargs)
self.mlc_compute_type = kwargs.get('mlc_compute_type', None)
class AetherModuleEntity(msrest.serialization.Model):
"""AetherModuleEntity.
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.AetherCreatedBy
:ivar display_name:
:vartype display_name: str
:ivar module_execution_type:
:vartype module_execution_type: str
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.AetherModuleType
:ivar module_type_version:
:vartype module_type_version: str
:ivar resource_requirements:
:vartype resource_requirements: ~flow.models.AetherResourceModel
:ivar machine_cluster:
:vartype machine_cluster: list[str]
:ivar default_compliance_cluster:
:vartype default_compliance_cluster: str
:ivar repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
"Cosmos".
:vartype repository_type: str or ~flow.models.AetherRepositoryType
:ivar relative_path_to_source_code:
:vartype relative_path_to_source_code: str
:ivar commit_id:
:vartype commit_id: str
:ivar code_review_link:
:vartype code_review_link: str
:ivar unit_tests_available:
:vartype unit_tests_available: bool
:ivar is_compressed:
:vartype is_compressed: bool
:ivar execution_environment: Possible values include: "ExeWorkerMachine",
"DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
"HyperVWithNetwork".
:vartype execution_environment: str or ~flow.models.AetherExecutionEnvironment
:ivar is_output_markup_enabled:
:vartype is_output_markup_enabled: bool
:ivar docker_image_id:
:vartype docker_image_id: str
:ivar docker_image_reference:
:vartype docker_image_reference: str
:ivar docker_image_security_groups:
:vartype docker_image_security_groups: str
:ivar extended_properties:
:vartype extended_properties: ~flow.models.AetherModuleExtendedProperties
:ivar deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
:vartype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
:ivar deployment_source_metadata:
:vartype deployment_source_metadata: str
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
:ivar kv_tags: This is a dictionary.
:vartype kv_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar created_by:
:vartype created_by: ~flow.models.AetherCreatedBy
:ivar runconfig:
:vartype runconfig: str
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar category:
:vartype category: str
:ivar step_type:
:vartype step_type: str
:ivar stage:
:vartype stage: str
:ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:vartype upload_state: str or ~flow.models.AetherUploadState
:ivar source_code_location:
:vartype source_code_location: str
:ivar size_in_bytes:
:vartype size_in_bytes: long
:ivar download_location:
:vartype download_location: str
:ivar data_location:
:vartype data_location: ~flow.models.AetherDataLocation
:ivar scripting_runtime_id:
:vartype scripting_runtime_id: str
:ivar interface_documentation:
:vartype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
:ivar is_eyes_on:
:vartype is_eyes_on: bool
:ivar compliance_cluster:
:vartype compliance_cluster: str
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar information_url:
:vartype information_url: str
:ivar is_experiment_id_in_parameters:
:vartype is_experiment_id_in_parameters: bool
:ivar interface_string:
:vartype interface_string: str
:ivar default_parameters: This is a dictionary.
:vartype default_parameters: dict[str, str]
:ivar structured_interface:
:vartype structured_interface: ~flow.models.AetherStructuredInterface
:ivar family_id:
:vartype family_id: str
:ivar name:
:vartype name: str
:ivar hash:
:vartype hash: str
:ivar description:
:vartype description: str
:ivar version:
:vartype version: str
:ivar sequence_number_in_family:
:vartype sequence_number_in_family: int
:ivar owner:
:vartype owner: str
:ivar azure_tenant_id:
:vartype azure_tenant_id: str
:ivar azure_user_id:
:vartype azure_user_id: str
:ivar collaborators:
:vartype collaborators: list[str]
:ivar id:
:vartype id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar etag:
:vartype etag: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.AetherEntityStatus
"""
_attribute_map = {
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'AetherCreatedBy'},
'display_name': {'key': 'displayName', 'type': 'str'},
'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
'module_type': {'key': 'moduleType', 'type': 'str'},
'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
'resource_requirements': {'key': 'resourceRequirements', 'type': 'AetherResourceModel'},
'machine_cluster': {'key': 'machineCluster', 'type': '[str]'},
'default_compliance_cluster': {'key': 'defaultComplianceCluster', 'type': 'str'},
'repository_type': {'key': 'repositoryType', 'type': 'str'},
'relative_path_to_source_code': {'key': 'relativePathToSourceCode', 'type': 'str'},
'commit_id': {'key': 'commitId', 'type': 'str'},
'code_review_link': {'key': 'codeReviewLink', 'type': 'str'},
'unit_tests_available': {'key': 'unitTestsAvailable', 'type': 'bool'},
'is_compressed': {'key': 'isCompressed', 'type': 'bool'},
'execution_environment': {'key': 'executionEnvironment', 'type': 'str'},
'is_output_markup_enabled': {'key': 'isOutputMarkupEnabled', 'type': 'bool'},
'docker_image_id': {'key': 'dockerImageId', 'type': 'str'},
'docker_image_reference': {'key': 'dockerImageReference', 'type': 'str'},
'docker_image_security_groups': {'key': 'dockerImageSecurityGroups', 'type': 'str'},
'extended_properties': {'key': 'extendedProperties', 'type': 'AetherModuleExtendedProperties'},
'deployment_source': {'key': 'deploymentSource', 'type': 'str'},
'deployment_source_metadata': {'key': 'deploymentSourceMetadata', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
'kv_tags': {'key': 'kvTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'created_by': {'key': 'createdBy', 'type': 'AetherCreatedBy'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'category': {'key': 'category', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'stage': {'key': 'stage', 'type': 'str'},
'upload_state': {'key': 'uploadState', 'type': 'str'},
'source_code_location': {'key': 'sourceCodeLocation', 'type': 'str'},
'size_in_bytes': {'key': 'sizeInBytes', 'type': 'long'},
'download_location': {'key': 'downloadLocation', 'type': 'str'},
'data_location': {'key': 'dataLocation', 'type': 'AetherDataLocation'},
'scripting_runtime_id': {'key': 'scriptingRuntimeId', 'type': 'str'},
'interface_documentation': {'key': 'interfaceDocumentation', 'type': 'AetherEntityInterfaceDocumentation'},
'is_eyes_on': {'key': 'isEyesOn', 'type': 'bool'},
'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'information_url': {'key': 'informationUrl', 'type': 'str'},
'is_experiment_id_in_parameters': {'key': 'isExperimentIdInParameters', 'type': 'bool'},
'interface_string': {'key': 'interfaceString', 'type': 'str'},
'default_parameters': {'key': 'defaultParameters', 'type': '{str}'},
'structured_interface': {'key': 'structuredInterface', 'type': 'AetherStructuredInterface'},
'family_id': {'key': 'familyId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'hash': {'key': 'hash', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'sequence_number_in_family': {'key': 'sequenceNumberInFamily', 'type': 'int'},
'owner': {'key': 'owner', 'type': 'str'},
'azure_tenant_id': {'key': 'azureTenantId', 'type': 'str'},
'azure_user_id': {'key': 'azureUserId', 'type': 'str'},
'collaborators': {'key': 'collaborators', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.AetherCreatedBy
:keyword display_name:
:paramtype display_name: str
:keyword module_execution_type:
:paramtype module_execution_type: str
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.AetherModuleType
:keyword module_type_version:
:paramtype module_type_version: str
:keyword resource_requirements:
:paramtype resource_requirements: ~flow.models.AetherResourceModel
:keyword machine_cluster:
:paramtype machine_cluster: list[str]
:keyword default_compliance_cluster:
:paramtype default_compliance_cluster: str
:keyword repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
"Cosmos".
:paramtype repository_type: str or ~flow.models.AetherRepositoryType
:keyword relative_path_to_source_code:
:paramtype relative_path_to_source_code: str
:keyword commit_id:
:paramtype commit_id: str
:keyword code_review_link:
:paramtype code_review_link: str
:keyword unit_tests_available:
:paramtype unit_tests_available: bool
:keyword is_compressed:
:paramtype is_compressed: bool
:keyword execution_environment: Possible values include: "ExeWorkerMachine",
"DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
"HyperVWithNetwork".
:paramtype execution_environment: str or ~flow.models.AetherExecutionEnvironment
:keyword is_output_markup_enabled:
:paramtype is_output_markup_enabled: bool
:keyword docker_image_id:
:paramtype docker_image_id: str
:keyword docker_image_reference:
:paramtype docker_image_reference: str
:keyword docker_image_security_groups:
:paramtype docker_image_security_groups: str
:keyword extended_properties:
:paramtype extended_properties: ~flow.models.AetherModuleExtendedProperties
:keyword deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
:paramtype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
:keyword deployment_source_metadata:
:paramtype deployment_source_metadata: str
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
:keyword kv_tags: This is a dictionary.
:paramtype kv_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword created_by:
:paramtype created_by: ~flow.models.AetherCreatedBy
:keyword runconfig:
:paramtype runconfig: str
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword category:
:paramtype category: str
:keyword step_type:
:paramtype step_type: str
:keyword stage:
:paramtype stage: str
:keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:paramtype upload_state: str or ~flow.models.AetherUploadState
:keyword source_code_location:
:paramtype source_code_location: str
:keyword size_in_bytes:
:paramtype size_in_bytes: long
:keyword download_location:
:paramtype download_location: str
:keyword data_location:
:paramtype data_location: ~flow.models.AetherDataLocation
:keyword scripting_runtime_id:
:paramtype scripting_runtime_id: str
:keyword interface_documentation:
:paramtype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
:keyword is_eyes_on:
:paramtype is_eyes_on: bool
:keyword compliance_cluster:
:paramtype compliance_cluster: str
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword information_url:
:paramtype information_url: str
:keyword is_experiment_id_in_parameters:
:paramtype is_experiment_id_in_parameters: bool
:keyword interface_string:
:paramtype interface_string: str
:keyword default_parameters: This is a dictionary.
:paramtype default_parameters: dict[str, str]
:keyword structured_interface:
:paramtype structured_interface: ~flow.models.AetherStructuredInterface
:keyword family_id:
:paramtype family_id: str
:keyword name:
:paramtype name: str
:keyword hash:
:paramtype hash: str
:keyword description:
:paramtype description: str
:keyword version:
:paramtype version: str
:keyword sequence_number_in_family:
:paramtype sequence_number_in_family: int
:keyword owner:
:paramtype owner: str
:keyword azure_tenant_id:
:paramtype azure_tenant_id: str
:keyword azure_user_id:
:paramtype azure_user_id: str
:keyword collaborators:
:paramtype collaborators: list[str]
:keyword id:
:paramtype id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword etag:
:paramtype etag: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.AetherEntityStatus
"""
super(AetherModuleEntity, self).__init__(**kwargs)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.display_name = kwargs.get('display_name', None)
self.module_execution_type = kwargs.get('module_execution_type', None)
self.module_type = kwargs.get('module_type', None)
self.module_type_version = kwargs.get('module_type_version', None)
self.resource_requirements = kwargs.get('resource_requirements', None)
self.machine_cluster = kwargs.get('machine_cluster', None)
self.default_compliance_cluster = kwargs.get('default_compliance_cluster', None)
self.repository_type = kwargs.get('repository_type', None)
self.relative_path_to_source_code = kwargs.get('relative_path_to_source_code', None)
self.commit_id = kwargs.get('commit_id', None)
self.code_review_link = kwargs.get('code_review_link', None)
self.unit_tests_available = kwargs.get('unit_tests_available', None)
self.is_compressed = kwargs.get('is_compressed', None)
self.execution_environment = kwargs.get('execution_environment', None)
self.is_output_markup_enabled = kwargs.get('is_output_markup_enabled', None)
self.docker_image_id = kwargs.get('docker_image_id', None)
self.docker_image_reference = kwargs.get('docker_image_reference', None)
self.docker_image_security_groups = kwargs.get('docker_image_security_groups', None)
self.extended_properties = kwargs.get('extended_properties', None)
self.deployment_source = kwargs.get('deployment_source', None)
self.deployment_source_metadata = kwargs.get('deployment_source_metadata', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
self.kv_tags = kwargs.get('kv_tags', None)
self.properties = kwargs.get('properties', None)
self.created_by = kwargs.get('created_by', None)
self.runconfig = kwargs.get('runconfig', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.category = kwargs.get('category', None)
self.step_type = kwargs.get('step_type', None)
self.stage = kwargs.get('stage', None)
self.upload_state = kwargs.get('upload_state', None)
self.source_code_location = kwargs.get('source_code_location', None)
self.size_in_bytes = kwargs.get('size_in_bytes', None)
self.download_location = kwargs.get('download_location', None)
self.data_location = kwargs.get('data_location', None)
self.scripting_runtime_id = kwargs.get('scripting_runtime_id', None)
self.interface_documentation = kwargs.get('interface_documentation', None)
self.is_eyes_on = kwargs.get('is_eyes_on', None)
self.compliance_cluster = kwargs.get('compliance_cluster', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.information_url = kwargs.get('information_url', None)
self.is_experiment_id_in_parameters = kwargs.get('is_experiment_id_in_parameters', None)
self.interface_string = kwargs.get('interface_string', None)
self.default_parameters = kwargs.get('default_parameters', None)
self.structured_interface = kwargs.get('structured_interface', None)
self.family_id = kwargs.get('family_id', None)
self.name = kwargs.get('name', None)
self.hash = kwargs.get('hash', None)
self.description = kwargs.get('description', None)
self.version = kwargs.get('version', None)
self.sequence_number_in_family = kwargs.get('sequence_number_in_family', None)
self.owner = kwargs.get('owner', None)
self.azure_tenant_id = kwargs.get('azure_tenant_id', None)
self.azure_user_id = kwargs.get('azure_user_id', None)
self.collaborators = kwargs.get('collaborators', None)
self.id = kwargs.get('id', None)
self.workspace_id = kwargs.get('workspace_id', None)
self.etag = kwargs.get('etag', None)
self.tags = kwargs.get('tags', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.entity_status = kwargs.get('entity_status', None)
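
# Usage sketch (illustrative): AetherModuleEntity exposes dozens of optional
# fields, but only the kwargs you pass are set; everything else stays None and
# msrest skips None values when serializing. All values below are hypothetical.
#
#     module = AetherModuleEntity(
#         name="my-module",
#         version="0.0.1",
#         is_deterministic=True,
#         kv_tags={"team": "flow"},
#     )
#     body = module.as_dict()  # only the populated fields appear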
class AetherModuleExtendedProperties(msrest.serialization.Model):
"""AetherModuleExtendedProperties.
:ivar auto_deployed_artifact:
:vartype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
:ivar script_needs_approval:
:vartype script_needs_approval: bool
"""
_attribute_map = {
'auto_deployed_artifact': {'key': 'autoDeployedArtifact', 'type': 'AetherBuildArtifactInfo'},
'script_needs_approval': {'key': 'scriptNeedsApproval', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_deployed_artifact:
:paramtype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
:keyword script_needs_approval:
:paramtype script_needs_approval: bool
"""
super(AetherModuleExtendedProperties, self).__init__(**kwargs)
self.auto_deployed_artifact = kwargs.get('auto_deployed_artifact', None)
self.script_needs_approval = kwargs.get('script_needs_approval', None)
class AetherNCrossValidations(msrest.serialization.Model):
"""AetherNCrossValidations.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherNCrossValidationMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherNCrossValidationMode
:keyword value:
:paramtype value: int
"""
super(AetherNCrossValidations, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
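
# Usage sketch (illustrative): the Auto/Custom pattern shared by several
# Aether models. Presumably ``value`` is only meaningful when mode is
# "Custom"; that reading is an assumption, not documented here.
#
#     auto_cv = AetherNCrossValidations(mode="Auto")
#     custom_cv = AetherNCrossValidations(mode="Custom", value=5)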
class AetherOutputSetting(msrest.serialization.Model):
"""AetherOutputSetting.
:ivar name:
:vartype name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_name_parameter_assignment:
:vartype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar data_store_mode_parameter_assignment:
:vartype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar path_on_compute_parameter_assignment:
:vartype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar web_service_port:
:vartype web_service_port: str
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.AetherDatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:ivar parameter_name:
:vartype parameter_name: str
:ivar asset_output_settings_parameter_name:
:vartype asset_output_settings_parameter_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'AetherParameterAssignment'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'AetherParameterAssignment'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'AetherParameterAssignment'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_name_parameter_assignment:
:paramtype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword data_store_mode_parameter_assignment:
:paramtype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword path_on_compute_parameter_assignment:
:paramtype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword web_service_port:
:paramtype web_service_port: str
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:keyword parameter_name:
:paramtype parameter_name: str
:keyword asset_output_settings_parameter_name:
:paramtype asset_output_settings_parameter_name: str
"""
super(AetherOutputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_name_parameter_assignment = kwargs.get('data_store_name_parameter_assignment', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.data_store_mode_parameter_assignment = kwargs.get('data_store_mode_parameter_assignment', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.path_on_compute_parameter_assignment = kwargs.get('path_on_compute_parameter_assignment', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.web_service_port = kwargs.get('web_service_port', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.asset_output_settings_parameter_name = kwargs.get('asset_output_settings_parameter_name', None)
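
# Usage sketch (illustrative): a minimal upload-style output. Values are
# hypothetical; the richer fields (dataset registration, parameter
# assignments, asset output settings) are left unset and drop out of the
# serialized payload.
#
#     output_setting = AetherOutputSetting(
#         name="scored_data",
#         data_store_name="workspaceblobstore",  # hypothetical datastore
#         data_store_mode="Upload",
#         path_on_compute="outputs/scored",
#         overwrite=True,
#     )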
class AetherParallelForControlFlowInfo(msrest.serialization.Model):
"""AetherParallelForControlFlowInfo.
:ivar parallel_for_items_input:
:vartype parallel_for_items_input: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parallel_for_items_input:
:paramtype parallel_for_items_input: ~flow.models.AetherParameterAssignment
"""
super(AetherParallelForControlFlowInfo, self).__init__(**kwargs)
self.parallel_for_items_input = kwargs.get('parallel_for_items_input', None)
class AetherParameterAssignment(msrest.serialization.Model):
"""AetherParameterAssignment.
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.AetherParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.AetherLegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[AetherParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'AetherLegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'AetherDataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.AetherParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.AetherLegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(AetherParameterAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
self.data_path_assignment = kwargs.get('data_path_assignment', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
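
# Usage sketch (illustrative): parameter assignments are recursive via the
# "Concatenate" value type. How the service resolves a "GraphParameterName"
# reference is an assumption here, and the names are hypothetical.
#
#     literal = AetherParameterAssignment(value_type="Literal", value="lr=")
#     graph_ref = AetherParameterAssignment(
#         value_type="GraphParameterName", value="learning_rate")
#     combined = AetherParameterAssignment(
#         name="args",
#         value_type="Concatenate",
#         assignments_to_concatenate=[literal, graph_ref],
#     )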
class AetherPhillyHdfsReference(msrest.serialization.Model):
"""AetherPhillyHdfsReference.
:ivar cluster:
:vartype cluster: str
:ivar vc:
:vartype vc: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'str'},
'vc': {'key': 'vc', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cluster:
:paramtype cluster: str
:keyword vc:
:paramtype vc: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherPhillyHdfsReference, self).__init__(**kwargs)
self.cluster = kwargs.get('cluster', None)
self.vc = kwargs.get('vc', None)
self.relative_path = kwargs.get('relative_path', None)
class AetherPortInfo(msrest.serialization.Model):
"""AetherPortInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar graph_port_name:
:vartype graph_port_name: str
:ivar is_parameter:
:vartype is_parameter: bool
:ivar web_service_port:
:vartype web_service_port: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
'is_parameter': {'key': 'isParameter', 'type': 'bool'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword graph_port_name:
:paramtype graph_port_name: str
:keyword is_parameter:
:paramtype is_parameter: bool
:keyword web_service_port:
:paramtype web_service_port: str
"""
super(AetherPortInfo, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
self.graph_port_name = kwargs.get('graph_port_name', None)
self.is_parameter = kwargs.get('is_parameter', None)
self.web_service_port = kwargs.get('web_service_port', None)
class AetherPriorityConfig(msrest.serialization.Model):
"""AetherPriorityConfig.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(AetherPriorityConfig, self).__init__(**kwargs)
self.job_priority = kwargs.get('job_priority', None)
self.is_preemptible = kwargs.get('is_preemptible', None)
self.node_count_set = kwargs.get('node_count_set', None)
self.scale_interval = kwargs.get('scale_interval', None)
class AetherPriorityConfiguration(msrest.serialization.Model):
"""AetherPriorityConfiguration.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar string_type_priority:
:vartype string_type_priority: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword string_type_priority:
:paramtype string_type_priority: str
"""
super(AetherPriorityConfiguration, self).__init__(**kwargs)
self.cloud_priority = kwargs.get('cloud_priority', None)
self.string_type_priority = kwargs.get('string_type_priority', None)
class AetherRegisteredDataSetReference(msrest.serialization.Model):
"""AetherRegisteredDataSetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AetherRegisteredDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
class AetherRemoteDockerComputeInfo(msrest.serialization.Model):
"""AetherRemoteDockerComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(AetherRemoteDockerComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)
class AetherResourceAssignment(msrest.serialization.Model):
"""AetherResourceAssignment.
:ivar attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
:vartype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
"""
_attribute_map = {
'attributes': {'key': 'attributes', 'type': '{AetherResourceAttributeAssignment}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
:paramtype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
"""
super(AetherResourceAssignment, self).__init__(**kwargs)
self.attributes = kwargs.get('attributes', None)
class AetherResourceAttributeAssignment(msrest.serialization.Model):
"""AetherResourceAttributeAssignment.
:ivar attribute:
:vartype attribute: ~flow.models.AetherResourceAttributeDefinition
:ivar operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
:vartype operator: str or ~flow.models.AetherResourceOperator
:ivar value:
:vartype value: str
"""
_attribute_map = {
'attribute': {'key': 'attribute', 'type': 'AetherResourceAttributeDefinition'},
'operator': {'key': 'operator', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword attribute:
:paramtype attribute: ~flow.models.AetherResourceAttributeDefinition
:keyword operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
:paramtype operator: str or ~flow.models.AetherResourceOperator
:keyword value:
:paramtype value: str
"""
super(AetherResourceAttributeAssignment, self).__init__(**kwargs)
self.attribute = kwargs.get('attribute', None)
self.operator = kwargs.get('operator', None)
self.value = kwargs.get('value', None)
class AetherResourceAttributeDefinition(msrest.serialization.Model):
"""AetherResourceAttributeDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "String", "Double".
:vartype type: str or ~flow.models.AetherResourceValueType
:ivar units:
:vartype units: str
:ivar allowed_operators:
:vartype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
"""
_validation = {
'allowed_operators': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'units': {'key': 'units', 'type': 'str'},
'allowed_operators': {'key': 'allowedOperators', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "String", "Double".
:paramtype type: str or ~flow.models.AetherResourceValueType
:keyword units:
:paramtype units: str
:keyword allowed_operators:
:paramtype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
"""
super(AetherResourceAttributeDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.units = kwargs.get('units', None)
self.allowed_operators = kwargs.get('allowed_operators', None)
class AetherResourceConfig(msrest.serialization.Model):
"""AetherResourceConfig.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(AetherResourceConfig, self).__init__(**kwargs)
self.gpu_count = kwargs.get('gpu_count', None)
self.cpu_count = kwargs.get('cpu_count', None)
self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)
class AetherResourceConfiguration(msrest.serialization.Model):
"""AetherResourceConfiguration.
:ivar instance_count:
:vartype instance_count: int
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar locations:
:vartype locations: list[str]
:ivar instance_priority:
:vartype instance_priority: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
"""
_attribute_map = {
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'locations': {'key': 'locations', 'type': '[str]'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_count:
:paramtype instance_count: int
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword locations:
:paramtype locations: list[str]
:keyword instance_priority:
:paramtype instance_priority: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
"""
super(AetherResourceConfiguration, self).__init__(**kwargs)
self.instance_count = kwargs.get('instance_count', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.locations = kwargs.get('locations', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
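
# Usage sketch (illustrative): ``properties`` is an open dict[str, any]
# ({object} on the wire), so values keep their JSON types. The VM size and
# priority strings are hypothetical; the model does not validate them.
#
#     resources = AetherResourceConfiguration(
#         instance_count=4,
#         instance_type="Standard_DS3_v2",
#         instance_priority="Dedicated",
#         properties={"enableAutoScale": True},
#     )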
class AetherResourceModel(msrest.serialization.Model):
"""AetherResourceModel.
:ivar resources:
:vartype resources: list[~flow.models.AetherResourceAssignment]
"""
_attribute_map = {
'resources': {'key': 'resources', 'type': '[AetherResourceAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword resources:
:paramtype resources: list[~flow.models.AetherResourceAssignment]
"""
super(AetherResourceModel, self).__init__(**kwargs)
self.resources = kwargs.get('resources', None)
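
# Usage sketch (illustrative): the resource-requirement models nest three
# levels deep. Attribute names, units, and values are hypothetical; note that
# AetherResourceAttributeDefinition validates ``allowed_operators`` as unique.
#
#     attr_def = AetherResourceAttributeDefinition(
#         name="Memory", type="Double", units="GB",
#         allowed_operators=["GreaterOrEqual"])
#     requirement = AetherResourceAssignment(attributes={
#         "Memory": AetherResourceAttributeAssignment(
#             attribute=attr_def, operator="GreaterOrEqual", value="64"),
#     })
#     model = AetherResourceModel(resources=[requirement])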
class AetherResourcesSetting(msrest.serialization.Model):
"""AetherResourcesSetting.
:ivar instance_size:
:vartype instance_size: str
:ivar spark_version:
:vartype spark_version: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword spark_version:
:paramtype spark_version: str
"""
super(AetherResourcesSetting, self).__init__(**kwargs)
self.instance_size = kwargs.get('instance_size', None)
self.spark_version = kwargs.get('spark_version', None)
class AetherSavedDataSetReference(msrest.serialization.Model):
"""AetherSavedDataSetReference.
:ivar id:
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
"""
super(AetherSavedDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class AetherScopeCloudConfiguration(msrest.serialization.Model):
"""AetherScopeCloudConfiguration.
:ivar input_path_suffixes: This is a dictionary.
:vartype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:ivar output_path_suffixes: This is a dictionary.
:vartype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:ivar user_alias:
:vartype user_alias: str
:ivar tokens:
:vartype tokens: int
:ivar auto_token:
:vartype auto_token: int
:ivar vcp:
:vartype vcp: float
"""
_attribute_map = {
'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'tokens': {'key': 'tokens', 'type': 'int'},
'auto_token': {'key': 'autoToken', 'type': 'int'},
'vcp': {'key': 'vcp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword input_path_suffixes: This is a dictionary.
:paramtype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:keyword output_path_suffixes: This is a dictionary.
:paramtype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:keyword user_alias:
:paramtype user_alias: str
:keyword tokens:
:paramtype tokens: int
:keyword auto_token:
:paramtype auto_token: int
:keyword vcp:
:paramtype vcp: float
"""
super(AetherScopeCloudConfiguration, self).__init__(**kwargs)
self.input_path_suffixes = kwargs.get('input_path_suffixes', None)
self.output_path_suffixes = kwargs.get('output_path_suffixes', None)
self.user_alias = kwargs.get('user_alias', None)
self.tokens = kwargs.get('tokens', None)
self.auto_token = kwargs.get('auto_token', None)
self.vcp = kwargs.get('vcp', None)
class AetherSeasonality(msrest.serialization.Model):
"""AetherSeasonality.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherSeasonalityMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherSeasonalityMode
:keyword value:
:paramtype value: int
"""
super(AetherSeasonality, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
class AetherSqlDataPath(msrest.serialization.Model):
"""AetherSqlDataPath.
:ivar sql_table_name:
:vartype sql_table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar sql_stored_procedure_name:
:vartype sql_stored_procedure_name: str
:ivar sql_stored_procedure_params:
:vartype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
"""
_attribute_map = {
'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[AetherStoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sql_table_name:
:paramtype sql_table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword sql_stored_procedure_name:
:paramtype sql_stored_procedure_name: str
:keyword sql_stored_procedure_params:
:paramtype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
"""
super(AetherSqlDataPath, self).__init__(**kwargs)
self.sql_table_name = kwargs.get('sql_table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None)
self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None)
class AetherStackEnsembleSettings(msrest.serialization.Model):
"""AetherStackEnsembleSettings.
:ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:vartype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
:ivar stack_meta_learner_train_percentage:
:vartype stack_meta_learner_train_percentage: float
:ivar stack_meta_learner_k_wargs: Anything.
:vartype stack_meta_learner_k_wargs: any
"""
_attribute_map = {
'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:paramtype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
:keyword stack_meta_learner_train_percentage:
:paramtype stack_meta_learner_train_percentage: float
:keyword stack_meta_learner_k_wargs: Anything.
:paramtype stack_meta_learner_k_wargs: any
"""
super(AetherStackEnsembleSettings, self).__init__(**kwargs)
self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None)
self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', None)
self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None)
class AetherStoredProcedureParameter(msrest.serialization.Model):
"""AetherStoredProcedureParameter.
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
:ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:vartype type: str or ~flow.models.AetherStoredProcedureParameterType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
:keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:paramtype type: str or ~flow.models.AetherStoredProcedureParameterType
"""
super(AetherStoredProcedureParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
self.type = kwargs.get('type', None)
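
# Illustrative sketch (not part of the generated code): AetherSqlDataPath can
# point at a table, an ad-hoc query, or a stored procedure. 'String' and 'Int'
# come from the documented AetherStoredProcedureParameterType values; the
# procedure and parameter names are hypothetical.
def _example_sql_data_path():  # pragma: no cover - documentation sketch
    return AetherSqlDataPath(
        sql_stored_procedure_name='GetTrainingRows',
        sql_stored_procedure_params=[
            AetherStoredProcedureParameter(name='region', value='westus2', type='String'),
            AetherStoredProcedureParameter(name='maxRows', value='10000', type='Int'),
        ],
    )
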
class AetherStructuredInterface(msrest.serialization.Model):
"""AetherStructuredInterface.
:ivar command_line_pattern:
:vartype command_line_pattern: str
:ivar inputs:
:vartype inputs: list[~flow.models.AetherStructuredInterfaceInput]
:ivar outputs:
:vartype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.AetherControlOutput]
:ivar parameters:
:vartype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:ivar arguments:
:vartype arguments: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '[AetherStructuredInterfaceInput]'},
'outputs': {'key': 'outputs', 'type': '[AetherStructuredInterfaceOutput]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[AetherControlOutput]'},
'parameters': {'key': 'parameters', 'type': '[AetherStructuredInterfaceParameter]'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[AetherStructuredInterfaceParameter]'},
'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command_line_pattern:
:paramtype command_line_pattern: str
:keyword inputs:
:paramtype inputs: list[~flow.models.AetherStructuredInterfaceInput]
:keyword outputs:
:paramtype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.AetherControlOutput]
:keyword parameters:
:paramtype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:keyword arguments:
:paramtype arguments: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherStructuredInterface, self).__init__(**kwargs)
self.command_line_pattern = kwargs.get('command_line_pattern', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.control_outputs = kwargs.get('control_outputs', None)
self.parameters = kwargs.get('parameters', None)
self.metadata_parameters = kwargs.get('metadata_parameters', None)
self.arguments = kwargs.get('arguments', None)
class AetherStructuredInterfaceInput(msrest.serialization.Model):
"""AetherStructuredInterfaceInput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_ids_list:
:vartype data_type_ids_list: list[str]
:ivar is_optional:
:vartype is_optional: bool
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_resource:
:vartype is_resource: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar dataset_types:
:vartype dataset_types: list[str or ~flow.models.AetherDatasetType]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_validation = {
'dataset_types': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_resource': {'key': 'isResource', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_ids_list:
:paramtype data_type_ids_list: list[str]
:keyword is_optional:
:paramtype is_optional: bool
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_resource:
:paramtype is_resource: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword dataset_types:
:paramtype dataset_types: list[str or ~flow.models.AetherDatasetType]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherStructuredInterfaceInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_ids_list = kwargs.get('data_type_ids_list', None)
self.is_optional = kwargs.get('is_optional', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_resource = kwargs.get('is_resource', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.dataset_types = kwargs.get('dataset_types', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
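
# Illustrative sketch (not part of the generated code): a structured interface
# input that mounts a dataset. 'Mount' comes from the documented
# AetherDataStoreMode values; the input name and data type id are hypothetical.
def _example_structured_interface_input():  # pragma: no cover - documentation sketch
    return AetherStructuredInterfaceInput(
        name='training_data',
        data_type_ids_list=['AnyDirectory'],
        is_optional=False,
        data_store_mode='Mount',
        data_reference_name='training_data',
    )
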
class AetherStructuredInterfaceOutput(msrest.serialization.Model):
"""AetherStructuredInterfaceOutput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_data_type_input_name:
:vartype pass_through_data_type_input_name: str
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_artifact:
:vartype is_artifact: bool
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar training_output:
:vartype training_output: ~flow.models.AetherTrainingOutput
:ivar dataset_output:
:vartype dataset_output: ~flow.models.AetherDatasetOutput
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:ivar early_available:
:vartype early_available: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_artifact': {'key': 'isArtifact', 'type': 'bool'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'training_output': {'key': 'trainingOutput', 'type': 'AetherTrainingOutput'},
'dataset_output': {'key': 'datasetOutput', 'type': 'AetherDatasetOutput'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
'early_available': {'key': 'earlyAvailable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_data_type_input_name:
:paramtype pass_through_data_type_input_name: str
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_artifact:
:paramtype is_artifact: bool
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword training_output:
:paramtype training_output: ~flow.models.AetherTrainingOutput
:keyword dataset_output:
:paramtype dataset_output: ~flow.models.AetherDatasetOutput
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:keyword early_available:
:paramtype early_available: bool
"""
super(AetherStructuredInterfaceOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.pass_through_data_type_input_name = kwargs.get('pass_through_data_type_input_name', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_artifact = kwargs.get('is_artifact', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.training_output = kwargs.get('training_output', None)
self.dataset_output = kwargs.get('dataset_output', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.early_available = kwargs.get('early_available', None)
class AetherStructuredInterfaceParameter(msrest.serialization.Model):
"""AetherStructuredInterfaceParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype parameter_type: str or ~flow.models.AetherParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar description:
:vartype description: str
:ivar set_environment_variable:
:vartype set_environment_variable: bool
:ivar environment_variable_override:
:vartype environment_variable_override: str
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar ui_hint:
:vartype ui_hint: ~flow.models.AetherUIParameterHint
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'ui_hint': {'key': 'uiHint', 'type': 'AetherUIParameterHint'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
"Undefined".
:paramtype parameter_type: str or ~flow.models.AetherParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword description:
:paramtype description: str
:keyword set_environment_variable:
:paramtype set_environment_variable: bool
:keyword environment_variable_override:
:paramtype environment_variable_override: str
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.AetherUIParameterHint
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
"""
super(AetherStructuredInterfaceParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.parameter_type = kwargs.get('parameter_type', None)
self.is_optional = kwargs.get('is_optional', None)
self.default_value = kwargs.get('default_value', None)
self.lower_bound = kwargs.get('lower_bound', None)
self.upper_bound = kwargs.get('upper_bound', None)
self.enum_values = kwargs.get('enum_values', None)
self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
self.description = kwargs.get('description', None)
self.set_environment_variable = kwargs.get('set_environment_variable', None)
self.environment_variable_override = kwargs.get('environment_variable_override', None)
self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
self.ui_hint = kwargs.get('ui_hint', None)
self.group_names = kwargs.get('group_names', None)
self.argument_name = kwargs.get('argument_name', None)
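
# Illustrative sketch (not part of the generated code): an enum-style parameter
# whose UI choices map to distinct command-line argument strings via
# enum_values_to_argument_strings. Names and values are hypothetical.
def _example_structured_interface_parameter():  # pragma: no cover - documentation sketch
    return AetherStructuredInterfaceParameter(
        name='mode',
        parameter_type='String',
        is_optional=True,
        default_value='fast',
        enum_values=['fast', 'accurate'],
        enum_values_to_argument_strings={'fast': '--fast', 'accurate': '--accurate'},
        argument_name='mode',
    )
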
class AetherSubGraphConfiguration(msrest.serialization.Model):
"""AetherSubGraphConfiguration.
:ivar graph_id:
:vartype graph_id: str
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar default_compute_internal:
:vartype default_compute_internal: ~flow.models.AetherComputeSetting
:ivar default_datastore_internal:
:vartype default_datastore_internal: ~flow.models.AetherDatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:ivar user_alias:
:vartype user_alias: str
:ivar is_dynamic:
:vartype is_dynamic: bool
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'default_compute_internal': {'key': 'defaultComputeInternal', 'type': 'AetherComputeSetting'},
'default_datastore_internal': {'key': 'defaultDatastoreInternal', 'type': 'AetherDatastoreSetting'},
'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
'user_alias': {'key': 'UserAlias', 'type': 'str'},
'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword default_compute_internal:
:paramtype default_compute_internal: ~flow.models.AetherComputeSetting
:keyword default_datastore_internal:
:paramtype default_datastore_internal: ~flow.models.AetherDatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:keyword user_alias:
:paramtype user_alias: str
:keyword is_dynamic:
:paramtype is_dynamic: bool
"""
super(AetherSubGraphConfiguration, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.graph_draft_id = kwargs.get('graph_draft_id', None)
self.default_compute_internal = kwargs.get('default_compute_internal', None)
self.default_datastore_internal = kwargs.get('default_datastore_internal', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.user_alias = kwargs.get('user_alias', None)
self.is_dynamic = kwargs.get('is_dynamic', False)
class AetherSweepEarlyTerminationPolicy(msrest.serialization.Model):
"""AetherSweepEarlyTerminationPolicy.
:ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType
:ivar evaluation_interval:
:vartype evaluation_interval: int
:ivar delay_evaluation:
:vartype delay_evaluation: int
:ivar slack_factor:
:vartype slack_factor: float
:ivar slack_amount:
:vartype slack_amount: float
:ivar truncation_percentage:
:vartype truncation_percentage: int
"""
_attribute_map = {
'policy_type': {'key': 'policyType', 'type': 'str'},
'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
'slack_factor': {'key': 'slackFactor', 'type': 'float'},
'slack_amount': {'key': 'slackAmount', 'type': 'float'},
'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword policy_type: Possible values include: "Bandit", "MedianStopping",
"TruncationSelection".
:paramtype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType
:keyword evaluation_interval:
:paramtype evaluation_interval: int
:keyword delay_evaluation:
:paramtype delay_evaluation: int
:keyword slack_factor:
:paramtype slack_factor: float
:keyword slack_amount:
:paramtype slack_amount: float
:keyword truncation_percentage:
:paramtype truncation_percentage: int
"""
super(AetherSweepEarlyTerminationPolicy, self).__init__(**kwargs)
self.policy_type = kwargs.get('policy_type', None)
self.evaluation_interval = kwargs.get('evaluation_interval', None)
self.delay_evaluation = kwargs.get('delay_evaluation', None)
self.slack_factor = kwargs.get('slack_factor', None)
self.slack_amount = kwargs.get('slack_amount', None)
self.truncation_percentage = kwargs.get('truncation_percentage', None)
class AetherSweepSettings(msrest.serialization.Model):
"""AetherSweepSettings.
:ivar limits:
:vartype limits: ~flow.models.AetherSweepSettingsLimits
:ivar search_space:
:vartype search_space: list[dict[str, str]]
:ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:vartype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType
:ivar early_termination:
:vartype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy
"""
_attribute_map = {
'limits': {'key': 'limits', 'type': 'AetherSweepSettingsLimits'},
'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
'early_termination': {'key': 'earlyTermination', 'type': 'AetherSweepEarlyTerminationPolicy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword limits:
:paramtype limits: ~flow.models.AetherSweepSettingsLimits
:keyword search_space:
:paramtype search_space: list[dict[str, str]]
:keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:paramtype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType
:keyword early_termination:
:paramtype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy
"""
super(AetherSweepSettings, self).__init__(**kwargs)
self.limits = kwargs.get('limits', None)
self.search_space = kwargs.get('search_space', None)
self.sampling_algorithm = kwargs.get('sampling_algorithm', None)
self.early_termination = kwargs.get('early_termination', None)
class AetherSweepSettingsLimits(msrest.serialization.Model):
"""AetherSweepSettingsLimits.
:ivar max_total_trials:
:vartype max_total_trials: int
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
"""
_attribute_map = {
'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_total_trials:
:paramtype max_total_trials: int
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
"""
super(AetherSweepSettingsLimits, self).__init__(**kwargs)
self.max_total_trials = kwargs.get('max_total_trials', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
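
# Illustrative sketch (not part of the generated code): composing sweep
# settings from the three models above. 'Random' and 'Bandit' come from the
# documented enum values; the search-space expression and numeric limits are
# hypothetical.
def _example_sweep_settings():  # pragma: no cover - documentation sketch
    return AetherSweepSettings(
        limits=AetherSweepSettingsLimits(max_total_trials=20, max_concurrent_trials=4),
        search_space=[{'learning_rate': 'uniform(0.001, 0.1)'}],
        sampling_algorithm='Random',
        early_termination=AetherSweepEarlyTerminationPolicy(
            policy_type='Bandit',
            evaluation_interval=2,
            delay_evaluation=10,
            slack_factor=0.1,
        ),
    )
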
class AetherTargetLags(msrest.serialization.Model):
"""AetherTargetLags.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherTargetLagsMode
:ivar values:
:vartype values: list[int]
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherTargetLagsMode
:keyword values:
:paramtype values: list[int]
"""
super(AetherTargetLags, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.values = kwargs.get('values', None)
class AetherTargetRollingWindowSize(msrest.serialization.Model):
"""AetherTargetRollingWindowSize.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode
:keyword value:
:paramtype value: int
"""
super(AetherTargetRollingWindowSize, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
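
# Illustrative sketch (not part of the generated code): AetherTargetLags and
# AetherTargetRollingWindowSize follow the same Auto/Custom pattern - in
# 'Auto' mode the value(s) are inferred, in 'Custom' mode they must be given.
# The lag values below are hypothetical.
def _example_forecasting_windows():  # pragma: no cover - documentation sketch
    lags = AetherTargetLags(mode='Custom', values=[1, 2, 3])
    window = AetherTargetRollingWindowSize(mode='Auto')  # value left to the service
    return lags, window
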
class AetherTargetSelectorConfiguration(msrest.serialization.Model):
"""AetherTargetSelectorConfiguration.
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
:ivar compute_type:
:vartype compute_type: str
:ivar instance_type:
:vartype instance_type: list[str]
:ivar instance_types:
:vartype instance_types: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar region:
:vartype region: list[str]
:ivar regions:
:vartype regions: list[str]
:ivar vc_block_list:
:vartype vc_block_list: list[str]
"""
_attribute_map = {
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': '[str]'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'regions': {'key': 'regions', 'type': '[str]'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
:keyword compute_type:
:paramtype compute_type: str
:keyword instance_type:
:paramtype instance_type: list[str]
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword region:
:paramtype region: list[str]
:keyword regions:
:paramtype regions: list[str]
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
"""
super(AetherTargetSelectorConfiguration, self).__init__(**kwargs)
self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
self.cluster_block_list = kwargs.get('cluster_block_list', None)
self.compute_type = kwargs.get('compute_type', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_types = kwargs.get('instance_types', None)
self.my_resource_only = kwargs.get('my_resource_only', None)
self.plan_id = kwargs.get('plan_id', None)
self.plan_region_id = kwargs.get('plan_region_id', None)
self.region = kwargs.get('region', None)
self.regions = kwargs.get('regions', None)
self.vc_block_list = kwargs.get('vc_block_list', None)
class AetherTestDataSettings(msrest.serialization.Model):
"""AetherTestDataSettings.
:ivar test_data_size:
:vartype test_data_size: float
"""
_attribute_map = {
'test_data_size': {'key': 'testDataSize', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword test_data_size:
:paramtype test_data_size: float
"""
super(AetherTestDataSettings, self).__init__(**kwargs)
self.test_data_size = kwargs.get('test_data_size', None)
class AetherTorchDistributedConfiguration(msrest.serialization.Model):
"""AetherTorchDistributedConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(AetherTorchDistributedConfiguration, self).__init__(**kwargs)
self.process_count_per_node = kwargs.get('process_count_per_node', None)
class AetherTrainingOutput(msrest.serialization.Model):
"""AetherTrainingOutput.
:ivar training_output_type: Possible values include: "Metrics", "Model".
:vartype training_output_type: str or ~flow.models.AetherTrainingOutputType
:ivar iteration:
:vartype iteration: int
:ivar metric:
:vartype metric: str
:ivar model_file:
:vartype model_file: str
"""
_attribute_map = {
'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
'iteration': {'key': 'iteration', 'type': 'int'},
'metric': {'key': 'metric', 'type': 'str'},
'model_file': {'key': 'modelFile', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.AetherTrainingOutputType
:keyword iteration:
:paramtype iteration: int
:keyword metric:
:paramtype metric: str
:keyword model_file:
:paramtype model_file: str
"""
super(AetherTrainingOutput, self).__init__(**kwargs)
self.training_output_type = kwargs.get('training_output_type', None)
self.iteration = kwargs.get('iteration', None)
self.metric = kwargs.get('metric', None)
self.model_file = kwargs.get('model_file', None)
class AetherTrainingSettings(msrest.serialization.Model):
"""AetherTrainingSettings.
:ivar block_list_models:
:vartype block_list_models: list[str]
:ivar allow_list_models:
:vartype allow_list_models: list[str]
:ivar enable_dnn_training:
:vartype enable_dnn_training: bool
:ivar enable_onnx_compatible_models:
:vartype enable_onnx_compatible_models: bool
:ivar stack_ensemble_settings:
:vartype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings
:ivar enable_stack_ensemble:
:vartype enable_stack_ensemble: bool
:ivar enable_vote_ensemble:
:vartype enable_vote_ensemble: bool
:ivar ensemble_model_download_timeout:
:vartype ensemble_model_download_timeout: str
:ivar enable_model_explainability:
:vartype enable_model_explainability: bool
:ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:vartype training_mode: str or ~flow.models.AetherTabularTrainingMode
"""
_attribute_map = {
'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'},
'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'AetherStackEnsembleSettings'},
'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'},
'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'},
'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'},
'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
'training_mode': {'key': 'trainingMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword block_list_models:
:paramtype block_list_models: list[str]
:keyword allow_list_models:
:paramtype allow_list_models: list[str]
:keyword enable_dnn_training:
:paramtype enable_dnn_training: bool
:keyword enable_onnx_compatible_models:
:paramtype enable_onnx_compatible_models: bool
:keyword stack_ensemble_settings:
:paramtype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings
:keyword enable_stack_ensemble:
:paramtype enable_stack_ensemble: bool
:keyword enable_vote_ensemble:
:paramtype enable_vote_ensemble: bool
:keyword ensemble_model_download_timeout:
:paramtype ensemble_model_download_timeout: str
:keyword enable_model_explainability:
:paramtype enable_model_explainability: bool
:keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:paramtype training_mode: str or ~flow.models.AetherTabularTrainingMode
"""
super(AetherTrainingSettings, self).__init__(**kwargs)
self.block_list_models = kwargs.get('block_list_models', None)
self.allow_list_models = kwargs.get('allow_list_models', None)
self.enable_dnn_training = kwargs.get('enable_dnn_training', None)
self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', None)
self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None)
self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', None)
self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', None)
self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', None)
self.enable_model_explainability = kwargs.get('enable_model_explainability', None)
self.training_mode = kwargs.get('training_mode', None)
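
# Illustrative sketch (not part of the generated code): AutoML-style training
# settings with a stack ensemble. 'LogisticRegression' and 'Auto' come from the
# documented enum values; the allow-list model names are hypothetical.
def _example_training_settings():  # pragma: no cover - documentation sketch
    return AetherTrainingSettings(
        allow_list_models=['LightGBM', 'XGBoostClassifier'],
        enable_stack_ensemble=True,
        stack_ensemble_settings=AetherStackEnsembleSettings(
            stack_meta_learner_type='LogisticRegression',
            stack_meta_learner_train_percentage=0.2,
        ),
        training_mode='Auto',
    )
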
class AetherUIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
"""AetherUIAzureOpenAIDeploymentNameSelector.
:ivar capabilities:
:vartype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities
"""
_attribute_map = {
'capabilities': {'key': 'Capabilities', 'type': 'AetherUIAzureOpenAIModelCapabilities'},
}
def __init__(
self,
**kwargs
):
"""
:keyword capabilities:
:paramtype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities
"""
super(AetherUIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
self.capabilities = kwargs.get('capabilities', None)
class AetherUIAzureOpenAIModelCapabilities(msrest.serialization.Model):
"""AetherUIAzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'Completion', 'type': 'bool'},
'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
'embeddings': {'key': 'Embeddings', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(AetherUIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = kwargs.get('completion', None)
self.chat_completion = kwargs.get('chat_completion', None)
self.embeddings = kwargs.get('embeddings', None)
class AetherUIColumnPicker(msrest.serialization.Model):
"""AetherUIColumnPicker.
:ivar column_picker_for:
:vartype column_picker_for: str
:ivar column_selection_categories:
:vartype column_selection_categories: list[str]
:ivar single_column_selection:
:vartype single_column_selection: bool
"""
_attribute_map = {
'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword column_picker_for:
:paramtype column_picker_for: str
:keyword column_selection_categories:
:paramtype column_selection_categories: list[str]
:keyword single_column_selection:
:paramtype single_column_selection: bool
"""
super(AetherUIColumnPicker, self).__init__(**kwargs)
self.column_picker_for = kwargs.get('column_picker_for', None)
self.column_selection_categories = kwargs.get('column_selection_categories', None)
self.single_column_selection = kwargs.get('single_column_selection', None)
class AetherUIJsonEditor(msrest.serialization.Model):
"""AetherUIJsonEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(AetherUIJsonEditor, self).__init__(**kwargs)
self.json_schema = kwargs.get('json_schema', None)
class AetherUIParameterHint(msrest.serialization.Model):
"""AetherUIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum
:ivar column_picker:
:vartype column_picker: ~flow.models.AetherUIColumnPicker
:ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:vartype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.AetherUIJsonEditor
:ivar prompt_flow_connection_selector:
:vartype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector
:ivar azure_open_ai_deployment_name_selector:
:vartype azure_open_ai_deployment_name_selector:
~flow.models.AetherUIAzureOpenAIDeploymentNameSelector
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'column_picker': {'key': 'columnPicker', 'type': 'AetherUIColumnPicker'},
'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'AetherUIJsonEditor'},
'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'AetherUIPromptFlowConnectionSelector'},
'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'AetherUIAzureOpenAIDeploymentNameSelector'},
'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
'anonymous': {'key': 'Anonymous', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum
:keyword column_picker:
:paramtype column_picker: ~flow.models.AetherUIColumnPicker
:keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:paramtype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.AetherUIJsonEditor
:keyword prompt_flow_connection_selector:
:paramtype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector
:keyword azure_open_ai_deployment_name_selector:
:paramtype azure_open_ai_deployment_name_selector:
~flow.models.AetherUIAzureOpenAIDeploymentNameSelector
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
"""
super(AetherUIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
self.column_picker = kwargs.get('column_picker', None)
self.ui_script_language = kwargs.get('ui_script_language', None)
self.json_editor = kwargs.get('json_editor', None)
self.prompt_flow_connection_selector = kwargs.get('prompt_flow_connection_selector', None)
self.azure_open_ai_deployment_name_selector = kwargs.get('azure_open_ai_deployment_name_selector', None)
self.ux_ignore = kwargs.get('ux_ignore', None)
self.anonymous = kwargs.get('anonymous', None)
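
# Illustrative sketch (not part of the generated code): a UI hint that renders
# a single-select column picker. 'ColumnPicker' comes from the documented
# AetherUIWidgetTypeEnum values; the target name and category are hypothetical.
def _example_ui_parameter_hint():  # pragma: no cover - documentation sketch
    return AetherUIParameterHint(
        ui_widget_type='ColumnPicker',
        column_picker=AetherUIColumnPicker(
            column_picker_for='input_dataset',
            column_selection_categories=['All'],
            single_column_selection=True,
        ),
    )
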
class AetherUIPromptFlowConnectionSelector(msrest.serialization.Model):
"""AetherUIPromptFlowConnectionSelector.
:ivar prompt_flow_connection_type:
:vartype prompt_flow_connection_type: str
"""
_attribute_map = {
'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword prompt_flow_connection_type:
:paramtype prompt_flow_connection_type: str
"""
super(AetherUIPromptFlowConnectionSelector, self).__init__(**kwargs)
self.prompt_flow_connection_type = kwargs.get('prompt_flow_connection_type', None)
class AetherValidationDataSettings(msrest.serialization.Model):
"""AetherValidationDataSettings.
:ivar n_cross_validations:
:vartype n_cross_validations: ~flow.models.AetherNCrossValidations
:ivar validation_data_size:
:vartype validation_data_size: float
:ivar cv_split_column_names:
:vartype cv_split_column_names: list[str]
:ivar validation_type:
:vartype validation_type: str
"""
_attribute_map = {
'n_cross_validations': {'key': 'nCrossValidations', 'type': 'AetherNCrossValidations'},
'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
'validation_type': {'key': 'validationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword n_cross_validations:
:paramtype n_cross_validations: ~flow.models.AetherNCrossValidations
:keyword validation_data_size:
:paramtype validation_data_size: float
:keyword cv_split_column_names:
:paramtype cv_split_column_names: list[str]
:keyword validation_type:
:paramtype validation_type: str
"""
super(AetherValidationDataSettings, self).__init__(**kwargs)
self.n_cross_validations = kwargs.get('n_cross_validations', None)
self.validation_data_size = kwargs.get('validation_data_size', None)
self.cv_split_column_names = kwargs.get('cv_split_column_names', None)
self.validation_type = kwargs.get('validation_type', None)
class AetherVsoBuildArtifactInfo(msrest.serialization.Model):
"""AetherVsoBuildArtifactInfo.
:ivar build_info:
:vartype build_info: ~flow.models.AetherVsoBuildInfo
:ivar download_url:
:vartype download_url: str
"""
_attribute_map = {
'build_info': {'key': 'buildInfo', 'type': 'AetherVsoBuildInfo'},
'download_url': {'key': 'downloadUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword build_info:
:paramtype build_info: ~flow.models.AetherVsoBuildInfo
:keyword download_url:
:paramtype download_url: str
"""
super(AetherVsoBuildArtifactInfo, self).__init__(**kwargs)
self.build_info = kwargs.get('build_info', None)
self.download_url = kwargs.get('download_url', None)
class AetherVsoBuildDefinitionInfo(msrest.serialization.Model):
"""AetherVsoBuildDefinitionInfo.
:ivar account_name:
:vartype account_name: str
:ivar project_id:
:vartype project_id: str
:ivar build_definition_id:
:vartype build_definition_id: int
"""
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'build_definition_id': {'key': 'buildDefinitionId', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword account_name:
:paramtype account_name: str
:keyword project_id:
:paramtype project_id: str
:keyword build_definition_id:
:paramtype build_definition_id: int
"""
super(AetherVsoBuildDefinitionInfo, self).__init__(**kwargs)
self.account_name = kwargs.get('account_name', None)
self.project_id = kwargs.get('project_id', None)
self.build_definition_id = kwargs.get('build_definition_id', None)
class AetherVsoBuildInfo(msrest.serialization.Model):
"""AetherVsoBuildInfo.
:ivar definition_info:
:vartype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
:ivar build_id:
:vartype build_id: int
"""
_attribute_map = {
'definition_info': {'key': 'definitionInfo', 'type': 'AetherVsoBuildDefinitionInfo'},
'build_id': {'key': 'buildId', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword definition_info:
:paramtype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
:keyword build_id:
:paramtype build_id: int
"""
super(AetherVsoBuildInfo, self).__init__(**kwargs)
self.definition_info = kwargs.get('definition_info', None)
self.build_id = kwargs.get('build_id', None)
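
# Illustrative sketch (not part of the generated code): the three VSO build
# models nest definition -> build -> artifact. The account, project id, build
# ids, and download URL are hypothetical.
def _example_vso_build_artifact():  # pragma: no cover - documentation sketch
    definition = AetherVsoBuildDefinitionInfo(
        account_name='contoso',
        project_id='00000000-0000-0000-0000-000000000000',
        build_definition_id=42,
    )
    build = AetherVsoBuildInfo(definition_info=definition, build_id=1234)
    return AetherVsoBuildArtifactInfo(
        build_info=build,
        download_url='https://example.invalid/artifact.zip',
    )
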
class AEVAComputeConfiguration(msrest.serialization.Model):
"""AEVAComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar is_preemptable:
:vartype is_preemptable: bool
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword is_preemptable:
:paramtype is_preemptable: bool
"""
super(AEVAComputeConfiguration, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.instance_count = kwargs.get('instance_count', None)
self.is_local = kwargs.get('is_local', None)
self.location = kwargs.get('location', None)
self.is_clusterless = kwargs.get('is_clusterless', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.is_preemptable = kwargs.get('is_preemptable', None)
class AEVAResourceConfiguration(msrest.serialization.Model):
"""AEVAResourceConfiguration.
:ivar instance_count:
:vartype instance_count: int
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar locations:
:vartype locations: list[str]
:ivar instance_priority:
:vartype instance_priority: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
"""
_attribute_map = {
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'locations': {'key': 'locations', 'type': '[str]'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_count:
:paramtype instance_count: int
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword locations:
:paramtype locations: list[str]
:keyword instance_priority:
:paramtype instance_priority: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
"""
super(AEVAResourceConfiguration, self).__init__(**kwargs)
self.instance_count = kwargs.get('instance_count', None)
self.instance_type = kwargs.get('instance_type', None)
self.properties = kwargs.get('properties', None)
self.locations = kwargs.get('locations', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
class AISuperComputerConfiguration(msrest.serialization.Model):
"""AISuperComputerConfiguration.
:ivar instance_type:
:vartype instance_type: str
:ivar instance_types:
:vartype instance_types: list[str]
:ivar image_version:
:vartype image_version: str
:ivar location:
:vartype location: str
:ivar locations:
:vartype locations: list[str]
:ivar ai_super_computer_storage_data: Dictionary of
:code:`<AISuperComputerStorageReferenceConfiguration>`.
:vartype ai_super_computer_storage_data: dict[str,
~flow.models.AISuperComputerStorageReferenceConfiguration]
:ivar interactive:
:vartype interactive: bool
:ivar scale_policy:
:vartype scale_policy: ~flow.models.AISuperComputerScalePolicy
:ivar virtual_cluster_arm_id:
:vartype virtual_cluster_arm_id: str
:ivar tensorboard_log_directory:
:vartype tensorboard_log_directory: str
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar ssh_public_keys:
:vartype ssh_public_keys: list[str]
:ivar enable_azml_int:
:vartype enable_azml_int: bool
:ivar priority:
:vartype priority: str
:ivar sla_tier:
:vartype sla_tier: str
:ivar suspend_on_idle_time_hours:
:vartype suspend_on_idle_time_hours: long
:ivar user_alias:
:vartype user_alias: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
:ivar model_compute_specification_id:
:vartype model_compute_specification_id: str
:ivar group_policy_name:
:vartype group_policy_name: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'image_version': {'key': 'imageVersion', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'ai_super_computer_storage_data': {'key': 'aiSuperComputerStorageData', 'type': '{AISuperComputerStorageReferenceConfiguration}'},
'interactive': {'key': 'interactive', 'type': 'bool'},
'scale_policy': {'key': 'scalePolicy', 'type': 'AISuperComputerScalePolicy'},
'virtual_cluster_arm_id': {'key': 'virtualClusterArmId', 'type': 'str'},
'tensorboard_log_directory': {'key': 'tensorboardLogDirectory', 'type': 'str'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'ssh_public_keys': {'key': 'sshPublicKeys', 'type': '[str]'},
'enable_azml_int': {'key': 'enableAzmlInt', 'type': 'bool'},
'priority': {'key': 'priority', 'type': 'str'},
'sla_tier': {'key': 'slaTier', 'type': 'str'},
'suspend_on_idle_time_hours': {'key': 'suspendOnIdleTimeHours', 'type': 'long'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
'group_policy_name': {'key': 'groupPolicyName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword image_version:
:paramtype image_version: str
:keyword location:
:paramtype location: str
:keyword locations:
:paramtype locations: list[str]
:keyword ai_super_computer_storage_data: Dictionary of
:code:`<AISuperComputerStorageReferenceConfiguration>`.
:paramtype ai_super_computer_storage_data: dict[str,
~flow.models.AISuperComputerStorageReferenceConfiguration]
:keyword interactive:
:paramtype interactive: bool
:keyword scale_policy:
:paramtype scale_policy: ~flow.models.AISuperComputerScalePolicy
:keyword virtual_cluster_arm_id:
:paramtype virtual_cluster_arm_id: str
:keyword tensorboard_log_directory:
:paramtype tensorboard_log_directory: str
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword ssh_public_keys:
:paramtype ssh_public_keys: list[str]
:keyword enable_azml_int:
:paramtype enable_azml_int: bool
:keyword priority:
:paramtype priority: str
:keyword sla_tier:
:paramtype sla_tier: str
:keyword suspend_on_idle_time_hours:
:paramtype suspend_on_idle_time_hours: long
:keyword user_alias:
:paramtype user_alias: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
:keyword model_compute_specification_id:
:paramtype model_compute_specification_id: str
:keyword group_policy_name:
:paramtype group_policy_name: str
"""
super(AISuperComputerConfiguration, self).__init__(**kwargs)
self.instance_type = kwargs.get('instance_type', None)
self.instance_types = kwargs.get('instance_types', None)
self.image_version = kwargs.get('image_version', None)
self.location = kwargs.get('location', None)
self.locations = kwargs.get('locations', None)
self.ai_super_computer_storage_data = kwargs.get('ai_super_computer_storage_data', None)
self.interactive = kwargs.get('interactive', None)
self.scale_policy = kwargs.get('scale_policy', None)
self.virtual_cluster_arm_id = kwargs.get('virtual_cluster_arm_id', None)
self.tensorboard_log_directory = kwargs.get('tensorboard_log_directory', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.ssh_public_keys = kwargs.get('ssh_public_keys', None)
self.enable_azml_int = kwargs.get('enable_azml_int', None)
self.priority = kwargs.get('priority', None)
self.sla_tier = kwargs.get('sla_tier', None)
self.suspend_on_idle_time_hours = kwargs.get('suspend_on_idle_time_hours', None)
self.user_alias = kwargs.get('user_alias', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
self.model_compute_specification_id = kwargs.get('model_compute_specification_id', None)
self.group_policy_name = kwargs.get('group_policy_name', None)
class AISuperComputerScalePolicy(msrest.serialization.Model):
"""AISuperComputerScalePolicy.
:ivar auto_scale_instance_type_count_set:
:vartype auto_scale_instance_type_count_set: list[int]
:ivar auto_scale_interval_in_sec:
:vartype auto_scale_interval_in_sec: int
:ivar max_instance_type_count:
:vartype max_instance_type_count: int
:ivar min_instance_type_count:
:vartype min_instance_type_count: int
"""
_attribute_map = {
'auto_scale_instance_type_count_set': {'key': 'autoScaleInstanceTypeCountSet', 'type': '[int]'},
'auto_scale_interval_in_sec': {'key': 'autoScaleIntervalInSec', 'type': 'int'},
'max_instance_type_count': {'key': 'maxInstanceTypeCount', 'type': 'int'},
'min_instance_type_count': {'key': 'minInstanceTypeCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_scale_instance_type_count_set:
:paramtype auto_scale_instance_type_count_set: list[int]
:keyword auto_scale_interval_in_sec:
:paramtype auto_scale_interval_in_sec: int
:keyword max_instance_type_count:
:paramtype max_instance_type_count: int
:keyword min_instance_type_count:
:paramtype min_instance_type_count: int
"""
super(AISuperComputerScalePolicy, self).__init__(**kwargs)
self.auto_scale_instance_type_count_set = kwargs.get('auto_scale_instance_type_count_set', None)
self.auto_scale_interval_in_sec = kwargs.get('auto_scale_interval_in_sec', None)
self.max_instance_type_count = kwargs.get('max_instance_type_count', None)
self.min_instance_type_count = kwargs.get('min_instance_type_count', None)
class AISuperComputerStorageReferenceConfiguration(msrest.serialization.Model):
"""AISuperComputerStorageReferenceConfiguration.
:ivar container_name:
:vartype container_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'container_name': {'key': 'containerName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container_name:
:paramtype container_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AISuperComputerStorageReferenceConfiguration, self).__init__(**kwargs)
self.container_name = kwargs.get('container_name', None)
self.relative_path = kwargs.get('relative_path', None)
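
# Illustrative sketch (not part of the generated code): an AI supercomputer
# configuration combining a storage mapping with an autoscale policy. The
# instance type, region, container, path, and SLA tier are hypothetical.
def _example_ai_super_computer_configuration():  # pragma: no cover - documentation sketch
    return AISuperComputerConfiguration(
        instance_type='ND40rs_v2',
        location='eastus',
        ai_super_computer_storage_data={
            'outputs': AISuperComputerStorageReferenceConfiguration(
                container_name='artifacts',
                relative_path='runs/001',
            ),
        },
        scale_policy=AISuperComputerScalePolicy(
            min_instance_type_count=1,
            max_instance_type_count=4,
            auto_scale_interval_in_sec=300,
        ),
        sla_tier='Standard',
    )
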
class AKSAdvanceSettings(msrest.serialization.Model):
"""AKSAdvanceSettings.
:ivar auto_scaler:
:vartype auto_scaler: ~flow.models.AutoScaler
:ivar container_resource_requirements:
:vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar scoring_timeout_ms:
:vartype scoring_timeout_ms: int
:ivar num_replicas:
:vartype num_replicas: int
"""
_attribute_map = {
'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
'num_replicas': {'key': 'numReplicas', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_scaler:
:paramtype auto_scaler: ~flow.models.AutoScaler
:keyword container_resource_requirements:
:paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword scoring_timeout_ms:
:paramtype scoring_timeout_ms: int
:keyword num_replicas:
:paramtype num_replicas: int
"""
super(AKSAdvanceSettings, self).__init__(**kwargs)
self.auto_scaler = kwargs.get('auto_scaler', None)
self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
self.num_replicas = kwargs.get('num_replicas', None)
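# Illustrative usage (not generated; values are hypothetical). AKS advanced
# settings compose the AutoScaler and ContainerResourceRequirements models;
# AutoScaler is defined further down in this module:
#
#     aks_settings = AKSAdvanceSettings(
#         auto_scaler=AutoScaler(autoscale_enabled=True, min_replicas=1, max_replicas=5),
#         app_insights_enabled=True,
#         scoring_timeout_ms=60000,
#         num_replicas=2,
#     )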
class AKSReplicaStatus(msrest.serialization.Model):
"""AKSReplicaStatus.
:ivar desired_replicas:
:vartype desired_replicas: int
:ivar updated_replicas:
:vartype updated_replicas: int
:ivar available_replicas:
:vartype available_replicas: int
:ivar error:
:vartype error: ~flow.models.ModelManagementErrorResponse
"""
_attribute_map = {
'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword desired_replicas:
:paramtype desired_replicas: int
:keyword updated_replicas:
:paramtype updated_replicas: int
:keyword available_replicas:
:paramtype available_replicas: int
:keyword error:
:paramtype error: ~flow.models.ModelManagementErrorResponse
"""
super(AKSReplicaStatus, self).__init__(**kwargs)
self.desired_replicas = kwargs.get('desired_replicas', None)
self.updated_replicas = kwargs.get('updated_replicas', None)
self.available_replicas = kwargs.get('available_replicas', None)
self.error = kwargs.get('error', None)
class AMLComputeConfiguration(msrest.serialization.Model):
"""AMLComputeConfiguration.
:ivar name:
:vartype name: str
:ivar vm_size:
:vartype vm_size: str
:ivar vm_priority: Possible values include: "Dedicated", "Lowpriority".
:vartype vm_priority: str or ~flow.models.VmPriority
:ivar retain_cluster:
:vartype retain_cluster: bool
:ivar cluster_max_node_count:
:vartype cluster_max_node_count: int
:ivar os_type:
:vartype os_type: str
:ivar virtual_machine_image:
:vartype virtual_machine_image: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'vm_priority': {'key': 'vmPriority', 'type': 'str'},
'retain_cluster': {'key': 'retainCluster', 'type': 'bool'},
'cluster_max_node_count': {'key': 'clusterMaxNodeCount', 'type': 'int'},
'os_type': {'key': 'osType', 'type': 'str'},
'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword vm_size:
:paramtype vm_size: str
:keyword vm_priority: Possible values include: "Dedicated", "Lowpriority".
:paramtype vm_priority: str or ~flow.models.VmPriority
:keyword retain_cluster:
:paramtype retain_cluster: bool
:keyword cluster_max_node_count:
:paramtype cluster_max_node_count: int
:keyword os_type:
:paramtype os_type: str
:keyword virtual_machine_image:
:paramtype virtual_machine_image: str
"""
super(AMLComputeConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.vm_size = kwargs.get('vm_size', None)
self.vm_priority = kwargs.get('vm_priority', None)
self.retain_cluster = kwargs.get('retain_cluster', None)
self.cluster_max_node_count = kwargs.get('cluster_max_node_count', None)
self.os_type = kwargs.get('os_type', None)
self.virtual_machine_image = kwargs.get('virtual_machine_image', None)
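# Illustrative usage (not generated; the VM size shown is hypothetical).
# ``vm_priority`` accepts the documented enum strings "Dedicated" or
# "Lowpriority":
#
#     compute = AMLComputeConfiguration(
#         name="cpu-cluster",
#         vm_size="STANDARD_DS3_V2",
#         vm_priority="Dedicated",
#         cluster_max_node_count=4,
#         os_type="Linux",
#     )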
class AmlDataset(msrest.serialization.Model):
"""AmlDataset.
:ivar registered_data_set_reference:
:vartype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'RegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registered_data_set_reference:
:paramtype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AmlDataset, self).__init__(**kwargs)
self.registered_data_set_reference = kwargs.get('registered_data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
class AmlK8SConfiguration(msrest.serialization.Model):
"""AmlK8SConfiguration.
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.ResourceConfiguration
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.InteractiveConfiguration
"""
_attribute_map = {
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfiguration'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AmlK8SPriorityConfiguration'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.ResourceConfiguration
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.InteractiveConfiguration
"""
super(AmlK8SConfiguration, self).__init__(**kwargs)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.priority_configuration = kwargs.get('priority_configuration', None)
self.interactive_configuration = kwargs.get('interactive_configuration', None)
class AmlK8SPriorityConfiguration(msrest.serialization.Model):
"""AmlK8SPriorityConfiguration.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(AmlK8SPriorityConfiguration, self).__init__(**kwargs)
self.job_priority = kwargs.get('job_priority', None)
self.is_preemptible = kwargs.get('is_preemptible', None)
self.node_count_set = kwargs.get('node_count_set', None)
self.scale_interval = kwargs.get('scale_interval', None)
class AmlSparkCloudSetting(msrest.serialization.Model):
"""AmlSparkCloudSetting.
:ivar entry:
:vartype entry: ~flow.models.EntrySetting
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar inline_environment_definition_string:
:vartype inline_environment_definition_string: str
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar compute:
:vartype compute: str
:ivar resources:
:vartype resources: ~flow.models.ResourcesSetting
:ivar identity:
:vartype identity: ~flow.models.IdentitySetting
"""
_attribute_map = {
'entry': {'key': 'entry', 'type': 'EntrySetting'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
'conf': {'key': 'conf', 'type': '{str}'},
'compute': {'key': 'compute', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'ResourcesSetting'},
'identity': {'key': 'identity', 'type': 'IdentitySetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword entry:
:paramtype entry: ~flow.models.EntrySetting
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword inline_environment_definition_string:
:paramtype inline_environment_definition_string: str
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword compute:
:paramtype compute: str
:keyword resources:
:paramtype resources: ~flow.models.ResourcesSetting
:keyword identity:
:paramtype identity: ~flow.models.IdentitySetting
"""
super(AmlSparkCloudSetting, self).__init__(**kwargs)
self.entry = kwargs.get('entry', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.inline_environment_definition_string = kwargs.get('inline_environment_definition_string', None)
self.conf = kwargs.get('conf', None)
self.compute = kwargs.get('compute', None)
self.resources = kwargs.get('resources', None)
self.identity = kwargs.get('identity', None)
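# Illustrative usage (not generated; sizes and conf keys are hypothetical).
# Only the Spark sizing fields are shown; ``entry``, ``resources`` and
# ``identity`` take the nested models documented above:
#
#     spark = AmlSparkCloudSetting(
#         driver_memory="28g",
#         driver_cores=4,
#         executor_memory="28g",
#         executor_cores=4,
#         number_executors=2,
#         conf={"spark.yarn.maxAppAttempts": "1"},
#     )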
class APCloudConfiguration(msrest.serialization.Model):
"""APCloudConfiguration.
:ivar referenced_ap_module_guid:
:vartype referenced_ap_module_guid: str
:ivar user_alias:
:vartype user_alias: str
:ivar aether_module_type:
:vartype aether_module_type: str
"""
_attribute_map = {
'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword referenced_ap_module_guid:
:paramtype referenced_ap_module_guid: str
:keyword user_alias:
:paramtype user_alias: str
:keyword aether_module_type:
:paramtype aether_module_type: str
"""
super(APCloudConfiguration, self).__init__(**kwargs)
self.referenced_ap_module_guid = kwargs.get('referenced_ap_module_guid', None)
self.user_alias = kwargs.get('user_alias', None)
self.aether_module_type = kwargs.get('aether_module_type', None)
class ApiAndParameters(msrest.serialization.Model):
"""ApiAndParameters.
:ivar api:
:vartype api: str
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
:ivar default_prompt:
:vartype default_prompt: str
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{FlowToolSettingParameter}'},
'default_prompt': {'key': 'default_prompt', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword api:
:paramtype api: str
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
:keyword default_prompt:
:paramtype default_prompt: str
"""
super(ApiAndParameters, self).__init__(**kwargs)
self.api = kwargs.get('api', None)
self.parameters = kwargs.get('parameters', None)
self.default_prompt = kwargs.get('default_prompt', None)
class ApplicationEndpointConfiguration(msrest.serialization.Model):
"""ApplicationEndpointConfiguration.
:ivar type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard", "VSCode",
"Theia", "Grafana", "Custom", "RayDashboard".
:vartype type: str or ~flow.models.ApplicationEndpointType
:ivar port:
:vartype port: int
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: ~flow.models.Nodes
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'Nodes'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard",
"VSCode", "Theia", "Grafana", "Custom", "RayDashboard".
:paramtype type: str or ~flow.models.ApplicationEndpointType
:keyword port:
:paramtype port: int
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: ~flow.models.Nodes
"""
super(ApplicationEndpointConfiguration, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.properties = kwargs.get('properties', None)
self.nodes = kwargs.get('nodes', None)
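# Illustrative usage (not generated; the property key is hypothetical).
# ``type`` takes one of the documented ApplicationEndpointType strings:
#
#     endpoint = ApplicationEndpointConfiguration(
#         type="TensorBoard",
#         port=6006,
#         properties={"logDir": "./logs"},
#     )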
class ArgumentAssignment(msrest.serialization.Model):
"""ArgumentAssignment.
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:vartype value_type: str or ~flow.models.ArgumentValueType
:ivar value:
:vartype value: str
:ivar nested_argument_list:
:vartype nested_argument_list: list[~flow.models.ArgumentAssignment]
:ivar string_interpolation_argument_list:
:vartype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[ArgumentAssignment]'},
'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:paramtype value_type: str or ~flow.models.ArgumentValueType
:keyword value:
:paramtype value: str
:keyword nested_argument_list:
:paramtype nested_argument_list: list[~flow.models.ArgumentAssignment]
:keyword string_interpolation_argument_list:
:paramtype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
"""
super(ArgumentAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.value = kwargs.get('value', None)
self.nested_argument_list = kwargs.get('nested_argument_list', None)
self.string_interpolation_argument_list = kwargs.get('string_interpolation_argument_list', None)
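# Illustrative usage (not generated; the argument names are hypothetical).
# ArgumentAssignment is recursive: a "NestedList" assignment carries child
# assignments in ``nested_argument_list``:
#
#     args = ArgumentAssignment(
#         value_type="NestedList",
#         nested_argument_list=[
#             ArgumentAssignment(value_type="Literal", value="--epochs"),
#             ArgumentAssignment(value_type="Parameter", value="num_epochs"),
#         ],
#     )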
class Asset(msrest.serialization.Model):
"""Asset.
:ivar asset_id:
:vartype asset_id: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'asset_id': {'key': 'assetId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_id:
:paramtype asset_id: str
:keyword type:
:paramtype type: str
"""
super(Asset, self).__init__(**kwargs)
self.asset_id = kwargs.get('asset_id', None)
self.type = kwargs.get('type', None)
class AssetDefinition(msrest.serialization.Model):
"""AssetDefinition.
:ivar path:
:vartype path: str
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AEVAAssetType
:ivar asset_id:
:vartype asset_id: str
:ivar serialized_asset_id:
:vartype serialized_asset_id: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AEVAAssetType
:keyword asset_id:
:paramtype asset_id: str
:keyword serialized_asset_id:
:paramtype serialized_asset_id: str
"""
super(AssetDefinition, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.type = kwargs.get('type', None)
self.asset_id = kwargs.get('asset_id', None)
self.serialized_asset_id = kwargs.get('serialized_asset_id', None)
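# Illustrative usage (not generated; the path is hypothetical). ``type``
# takes one of the documented AEVAAssetType strings:
#
#     asset = AssetDefinition(
#         path="azureml://datastores/workspaceblobstore/paths/data/",
#         type="UriFolder",
#     )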
class AssetNameAndVersionIdentifier(msrest.serialization.Model):
"""AssetNameAndVersionIdentifier.
:ivar asset_name:
:vartype asset_name: str
:ivar version:
:vartype version: str
:ivar feed_name:
:vartype feed_name: str
"""
_attribute_map = {
'asset_name': {'key': 'assetName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_name:
:paramtype asset_name: str
:keyword version:
:paramtype version: str
:keyword feed_name:
:paramtype feed_name: str
"""
super(AssetNameAndVersionIdentifier, self).__init__(**kwargs)
self.asset_name = kwargs.get('asset_name', None)
self.version = kwargs.get('version', None)
self.feed_name = kwargs.get('feed_name', None)
class AssetOutputSettings(msrest.serialization.Model):
"""AssetOutputSettings.
:ivar path:
:vartype path: str
:ivar path_parameter_assignment:
:vartype path_parameter_assignment: ~flow.models.ParameterAssignment
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AEVAAssetType
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'ParameterAssignment'},
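# NOTE (editorial): unlike the other entries, the wire key above is
# PascalCase; it is kept as-is to match the service contract.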
'type': {'key': 'type', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword path_parameter_assignment:
:paramtype path_parameter_assignment: ~flow.models.ParameterAssignment
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AEVAAssetType
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AssetOutputSettings, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.path_parameter_assignment = kwargs.get('path_parameter_assignment', None)
self.type = kwargs.get('type', None)
self.options = kwargs.get('options', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
class AssetOutputSettingsParameter(msrest.serialization.Model):
"""AssetOutputSettingsParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.AssetOutputSettings
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'AssetOutputSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.AssetOutputSettings
"""
super(AssetOutputSettingsParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
class AssetPublishResult(msrest.serialization.Model):
"""AssetPublishResult.
:ivar feed_name:
:vartype feed_name: str
:ivar asset_name:
:vartype asset_name: str
:ivar asset_version:
:vartype asset_version: str
:ivar step_name:
:vartype step_name: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar last_updated_time:
:vartype last_updated_time: ~datetime.datetime
:ivar regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
:vartype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
'asset_version': {'key': 'assetVersion', 'type': 'str'},
'step_name': {'key': 'stepName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'regional_publish_results': {'key': 'regionalPublishResults', 'type': '{AssetPublishSingleRegionResult}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword asset_name:
:paramtype asset_name: str
:keyword asset_version:
:paramtype asset_version: str
:keyword step_name:
:paramtype step_name: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword last_updated_time:
:paramtype last_updated_time: ~datetime.datetime
:keyword regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
:paramtype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
"""
super(AssetPublishResult, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.asset_name = kwargs.get('asset_name', None)
self.asset_version = kwargs.get('asset_version', None)
self.step_name = kwargs.get('step_name', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.created_time = kwargs.get('created_time', None)
self.last_updated_time = kwargs.get('last_updated_time', None)
self.regional_publish_results = kwargs.get('regional_publish_results', None)
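# Illustrative usage (not generated; all values are hypothetical). Fields
# mapped as 'iso-8601' accept ``datetime`` objects and are serialized to
# ISO-8601 strings on the wire:
#
#     import datetime
#     result = AssetPublishResult(
#         asset_name="my-component",
#         asset_version="1.0.0",
#         status="Succeeded",
#         created_time=datetime.datetime(2023, 9, 1, tzinfo=datetime.timezone.utc),
#     )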
class AssetPublishSingleRegionResult(msrest.serialization.Model):
"""AssetPublishSingleRegionResult.
:ivar step_name:
:vartype step_name: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar last_updated_time:
:vartype last_updated_time: ~datetime.datetime
:ivar total_steps:
:vartype total_steps: int
:ivar finished_steps:
:vartype finished_steps: int
:ivar remaining_steps:
:vartype remaining_steps: int
"""
_attribute_map = {
'step_name': {'key': 'stepName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'total_steps': {'key': 'totalSteps', 'type': 'int'},
'finished_steps': {'key': 'finishedSteps', 'type': 'int'},
'remaining_steps': {'key': 'remainingSteps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword step_name:
:paramtype step_name: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword last_updated_time:
:paramtype last_updated_time: ~datetime.datetime
:keyword total_steps:
:paramtype total_steps: int
:keyword finished_steps:
:paramtype finished_steps: int
:keyword remaining_steps:
:paramtype remaining_steps: int
"""
super(AssetPublishSingleRegionResult, self).__init__(**kwargs)
self.step_name = kwargs.get('step_name', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.last_updated_time = kwargs.get('last_updated_time', None)
self.total_steps = kwargs.get('total_steps', None)
self.finished_steps = kwargs.get('finished_steps', None)
self.remaining_steps = kwargs.get('remaining_steps', None)
class AssetTypeMetaInfo(msrest.serialization.Model):
"""AssetTypeMetaInfo.
:ivar consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade".
:vartype consumption_mode: str or ~flow.models.ConsumeMode
"""
_attribute_map = {
'consumption_mode': {'key': 'consumptionMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade".
:paramtype consumption_mode: str or ~flow.models.ConsumeMode
"""
super(AssetTypeMetaInfo, self).__init__(**kwargs)
self.consumption_mode = kwargs.get('consumption_mode', None)
class AssetVersionPublishRequest(msrest.serialization.Model):
"""AssetVersionPublishRequest.
:ivar asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
"DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
"FlowRuntimeSpec".
:vartype asset_type: str or ~flow.models.AssetType
:ivar asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip".
:vartype asset_source_type: str or ~flow.models.AssetSourceType
:ivar yaml_file:
:vartype yaml_file: str
:ivar source_zip_url:
:vartype source_zip_url: str
:ivar source_zip_file:
:vartype source_zip_file: IO
:ivar feed_name:
:vartype feed_name: str
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar referenced_assets:
:vartype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
:ivar flow_file:
:vartype flow_file: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'asset_type': {'key': 'assetType', 'type': 'str'},
'asset_source_type': {'key': 'assetSourceType', 'type': 'str'},
'yaml_file': {'key': 'yamlFile', 'type': 'str'},
'source_zip_url': {'key': 'sourceZipUrl', 'type': 'str'},
'source_zip_file': {'key': 'sourceZipFile', 'type': 'IO'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'referenced_assets': {'key': 'referencedAssets', 'type': '[AssetNameAndVersionIdentifier]'},
'flow_file': {'key': 'flowFile', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
"DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
"FlowRuntimeSpec".
:paramtype asset_type: str or ~flow.models.AssetType
:keyword asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip".
:paramtype asset_source_type: str or ~flow.models.AssetSourceType
:keyword yaml_file:
:paramtype yaml_file: str
:keyword source_zip_url:
:paramtype source_zip_url: str
:keyword source_zip_file:
:paramtype source_zip_file: IO
:keyword feed_name:
:paramtype feed_name: str
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword referenced_assets:
:paramtype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
:keyword flow_file:
:paramtype flow_file: str
:keyword version:
:paramtype version: str
"""
super(AssetVersionPublishRequest, self).__init__(**kwargs)
self.asset_type = kwargs.get('asset_type', None)
self.asset_source_type = kwargs.get('asset_source_type', None)
self.yaml_file = kwargs.get('yaml_file', None)
self.source_zip_url = kwargs.get('source_zip_url', None)
self.source_zip_file = kwargs.get('source_zip_file', None)
self.feed_name = kwargs.get('feed_name', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.referenced_assets = kwargs.get('referenced_assets', None)
self.flow_file = kwargs.get('flow_file', None)
self.version = kwargs.get('version', None)
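# Illustrative usage (not generated; names and version are hypothetical).
# Publishing a local flow tool asset with the documented enum strings:
#
#     request = AssetVersionPublishRequest(
#         asset_type="FlowTool",
#         asset_source_type="Local",
#         yaml_file="tool.yaml",
#         feed_name="my-feed",
#         set_as_default_version=True,
#         version="0.0.1",
#     )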
class AssignedUser(msrest.serialization.Model):
"""AssignedUser.
:ivar object_id:
:vartype object_id: str
:ivar tenant_id:
:vartype tenant_id: str
"""
_attribute_map = {
'object_id': {'key': 'objectId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword object_id:
:paramtype object_id: str
:keyword tenant_id:
:paramtype tenant_id: str
"""
super(AssignedUser, self).__init__(**kwargs)
self.object_id = kwargs.get('object_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
class AuthKeys(msrest.serialization.Model):
"""AuthKeys.
:ivar primary_key:
:vartype primary_key: str
:ivar secondary_key:
:vartype secondary_key: str
"""
_attribute_map = {
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword primary_key:
:paramtype primary_key: str
:keyword secondary_key:
:paramtype secondary_key: str
"""
super(AuthKeys, self).__init__(**kwargs)
self.primary_key = kwargs.get('primary_key', None)
self.secondary_key = kwargs.get('secondary_key', None)
class AutoClusterComputeSpecification(msrest.serialization.Model):
"""AutoClusterComputeSpecification.
:ivar instance_size:
:vartype instance_size: str
:ivar instance_priority:
:vartype instance_priority: str
:ivar os_type:
:vartype os_type: str
:ivar location:
:vartype location: str
:ivar runtime_version:
:vartype runtime_version: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
:ivar model_compute_specification_id:
:vartype model_compute_specification_id: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword instance_priority:
:paramtype instance_priority: str
:keyword os_type:
:paramtype os_type: str
:keyword location:
:paramtype location: str
:keyword runtime_version:
:paramtype runtime_version: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
:keyword model_compute_specification_id:
:paramtype model_compute_specification_id: str
"""
super(AutoClusterComputeSpecification, self).__init__(**kwargs)
self.instance_size = kwargs.get('instance_size', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.os_type = kwargs.get('os_type', None)
self.location = kwargs.get('location', None)
self.runtime_version = kwargs.get('runtime_version', None)
self.quota_enforcement_resource_id = kwargs.get('quota_enforcement_resource_id', None)
self.model_compute_specification_id = kwargs.get('model_compute_specification_id', None)
class AutoDeleteSetting(msrest.serialization.Model):
"""AutoDeleteSetting.
:ivar condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
:vartype condition: str or ~flow.models.AutoDeleteCondition
:ivar value:
:vartype value: str
"""
_attribute_map = {
'condition': {'key': 'condition', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
:paramtype condition: str or ~flow.models.AutoDeleteCondition
:keyword value:
:paramtype value: str
"""
super(AutoDeleteSetting, self).__init__(**kwargs)
self.condition = kwargs.get('condition', None)
self.value = kwargs.get('value', None)
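# Illustrative usage (not generated; the value shown is hypothetical, as the
# wire type is a plain string):
#
#     auto_delete = AutoDeleteSetting(
#         condition="LastAccessedGreaterThan",
#         value="30.00:00:00",
#     )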
class AutoFeaturizeConfiguration(msrest.serialization.Model):
"""AutoFeaturizeConfiguration.
:ivar featurization_config:
:vartype featurization_config: ~flow.models.FeaturizationSettings
"""
_attribute_map = {
'featurization_config': {'key': 'featurizationConfig', 'type': 'FeaturizationSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword featurization_config:
:paramtype featurization_config: ~flow.models.FeaturizationSettings
"""
super(AutoFeaturizeConfiguration, self).__init__(**kwargs)
self.featurization_config = kwargs.get('featurization_config', None)
class AutologgerSettings(msrest.serialization.Model):
"""AutologgerSettings.
:ivar ml_flow_autologger: Possible values include: "Enabled", "Disabled".
:vartype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState
"""
_attribute_map = {
'ml_flow_autologger': {'key': 'mlFlowAutologger', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ml_flow_autologger: Possible values include: "Enabled", "Disabled".
:paramtype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState
"""
super(AutologgerSettings, self).__init__(**kwargs)
self.ml_flow_autologger = kwargs.get('ml_flow_autologger', None)
class AutoMLComponentConfiguration(msrest.serialization.Model):
"""AutoMLComponentConfiguration.
:ivar auto_train_config:
:vartype auto_train_config: ~flow.models.AutoTrainConfiguration
:ivar auto_featurize_config:
:vartype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration
"""
_attribute_map = {
'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AutoTrainConfiguration'},
'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AutoFeaturizeConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword auto_train_config:
:paramtype auto_train_config: ~flow.models.AutoTrainConfiguration
:keyword auto_featurize_config:
:paramtype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration
"""
super(AutoMLComponentConfiguration, self).__init__(**kwargs)
self.auto_train_config = kwargs.get('auto_train_config', None)
self.auto_featurize_config = kwargs.get('auto_featurize_config', None)
class AutoScaler(msrest.serialization.Model):
"""AutoScaler.
:ivar autoscale_enabled:
:vartype autoscale_enabled: bool
:ivar min_replicas:
:vartype min_replicas: int
:ivar max_replicas:
:vartype max_replicas: int
:ivar target_utilization:
:vartype target_utilization: int
:ivar refresh_period_in_seconds:
:vartype refresh_period_in_seconds: int
"""
_attribute_map = {
'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword autoscale_enabled:
:paramtype autoscale_enabled: bool
:keyword min_replicas:
:paramtype min_replicas: int
:keyword max_replicas:
:paramtype max_replicas: int
:keyword target_utilization:
:paramtype target_utilization: int
:keyword refresh_period_in_seconds:
:paramtype refresh_period_in_seconds: int
"""
super(AutoScaler, self).__init__(**kwargs)
self.autoscale_enabled = kwargs.get('autoscale_enabled', None)
self.min_replicas = kwargs.get('min_replicas', None)
self.max_replicas = kwargs.get('max_replicas', None)
self.target_utilization = kwargs.get('target_utilization', None)
self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
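# Illustrative usage (not generated; values are hypothetical). ``serialize()``
# from the msrest base model emits the camelCase wire keys declared in
# ``_attribute_map``:
#
#     scaler = AutoScaler(autoscale_enabled=True, min_replicas=1, max_replicas=10,
#                         target_utilization=70, refresh_period_in_seconds=10)
#     scaler.serialize()
#     # {'autoscaleEnabled': True, 'minReplicas': 1, 'maxReplicas': 10,
#     #  'targetUtilization': 70, 'refreshPeriodInSeconds': 10}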
class AutoTrainConfiguration(msrest.serialization.Model):
"""AutoTrainConfiguration.
:ivar general_settings:
:vartype general_settings: ~flow.models.GeneralSettings
:ivar limit_settings:
:vartype limit_settings: ~flow.models.LimitSettings
:ivar data_settings:
:vartype data_settings: ~flow.models.DataSettings
:ivar forecasting_settings:
:vartype forecasting_settings: ~flow.models.ForecastingSettings
:ivar training_settings:
:vartype training_settings: ~flow.models.TrainingSettings
:ivar sweep_settings:
:vartype sweep_settings: ~flow.models.SweepSettings
:ivar image_model_settings: Dictionary of :code:`<any>`.
:vartype image_model_settings: dict[str, any]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar compute_configuration:
:vartype compute_configuration: ~flow.models.AEVAComputeConfiguration
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.AEVAResourceConfiguration
:ivar environment_id:
:vartype environment_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
"""
_attribute_map = {
'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'},
'limit_settings': {'key': 'limitSettings', 'type': 'LimitSettings'},
'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'},
'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'},
'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'},
'sweep_settings': {'key': 'sweepSettings', 'type': 'SweepSettings'},
'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
'properties': {'key': 'properties', 'type': '{str}'},
'compute_configuration': {'key': 'computeConfiguration', 'type': 'AEVAComputeConfiguration'},
'resource_configuration': {'key': 'resourceConfigurtion', 'type': 'AEVAResourceConfiguration'},  # wire key keeps the service's original (misspelled) name
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword general_settings:
:paramtype general_settings: ~flow.models.GeneralSettings
:keyword limit_settings:
:paramtype limit_settings: ~flow.models.LimitSettings
:keyword data_settings:
:paramtype data_settings: ~flow.models.DataSettings
:keyword forecasting_settings:
:paramtype forecasting_settings: ~flow.models.ForecastingSettings
:keyword training_settings:
:paramtype training_settings: ~flow.models.TrainingSettings
:keyword sweep_settings:
:paramtype sweep_settings: ~flow.models.SweepSettings
:keyword image_model_settings: Dictionary of :code:`<any>`.
:paramtype image_model_settings: dict[str, any]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword compute_configuration:
:paramtype compute_configuration: ~flow.models.AEVAComputeConfiguration
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.AEVAResourceConfiguration
:keyword environment_id:
:paramtype environment_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
"""
super(AutoTrainConfiguration, self).__init__(**kwargs)
self.general_settings = kwargs.get('general_settings', None)
self.limit_settings = kwargs.get('limit_settings', None)
self.data_settings = kwargs.get('data_settings', None)
self.forecasting_settings = kwargs.get('forecasting_settings', None)
self.training_settings = kwargs.get('training_settings', None)
self.sweep_settings = kwargs.get('sweep_settings', None)
self.image_model_settings = kwargs.get('image_model_settings', None)
self.properties = kwargs.get('properties', None)
self.compute_configuration = kwargs.get('compute_configuration', None)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.environment_id = kwargs.get('environment_id', None)
self.environment_variables = kwargs.get('environment_variables', None)
class AvailabilityResponse(msrest.serialization.Model):
"""AvailabilityResponse.
:ivar is_available:
:vartype is_available: bool
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
"""
_attribute_map = {
'is_available': {'key': 'isAvailable', 'type': 'bool'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_available:
:paramtype is_available: bool
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
"""
super(AvailabilityResponse, self).__init__(**kwargs)
self.is_available = kwargs.get('is_available', None)
self.error = kwargs.get('error', None)
class AzureBlobReference(msrest.serialization.Model):
"""AzureBlobReference.
:ivar container:
:vartype container: str
:ivar sas_token:
:vartype sas_token: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'container': {'key': 'container', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container:
:paramtype container: str
:keyword sas_token:
:paramtype sas_token: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureBlobReference, self).__init__(**kwargs)
self.container = kwargs.get('container', None)
self.sas_token = kwargs.get('sas_token', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
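# Illustrative usage (not generated; account and paths are hypothetical).
# Referencing a blob location through a workspace datastore:
#
#     blob_ref = AzureBlobReference(
#         container="data",
#         account="mystorageaccount",
#         relative_path="datasets/train.csv",
#         aml_data_store_name="workspaceblobstore",
#     )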
class AzureDatabaseReference(msrest.serialization.Model):
"""AzureDatabaseReference.
:ivar table_name:
:vartype table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'table_name': {'key': 'tableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword table_name:
:paramtype table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDatabaseReference, self).__init__(**kwargs)
self.table_name = kwargs.get('table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureDataLakeGen2Reference(msrest.serialization.Model):
"""AzureDataLakeGen2Reference.
:ivar file_system_name:
:vartype file_system_name: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file_system_name:
:paramtype file_system_name: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDataLakeGen2Reference, self).__init__(**kwargs)
self.file_system_name = kwargs.get('file_system_name', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureDataLakeReference(msrest.serialization.Model):
"""AzureDataLakeReference.
:ivar tenant:
:vartype tenant: str
:ivar subscription:
:vartype subscription: str
:ivar resource_group:
:vartype resource_group: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'tenant': {'key': 'tenant', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tenant:
:paramtype tenant: str
:keyword subscription:
:paramtype subscription: str
:keyword resource_group:
:paramtype resource_group: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDataLakeReference, self).__init__(**kwargs)
self.tenant = kwargs.get('tenant', None)
self.subscription = kwargs.get('subscription', None)
self.resource_group = kwargs.get('resource_group', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureFilesReference(msrest.serialization.Model):
"""AzureFilesReference.
:ivar share:
:vartype share: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'share': {'key': 'share', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword share:
:paramtype share: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureFilesReference, self).__init__(**kwargs)
self.share = kwargs.get('share', None)
self.uri = kwargs.get('uri', None)
self.account = kwargs.get('account', None)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class AzureMLModuleVersionDescriptor(msrest.serialization.Model):
"""AzureMLModuleVersionDescriptor.
:ivar module_version_id:
:vartype module_version_id: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_version_id:
:paramtype module_version_id: str
:keyword version:
:paramtype version: str
"""
super(AzureMLModuleVersionDescriptor, self).__init__(**kwargs)
self.module_version_id = kwargs.get('module_version_id', None)
self.version = kwargs.get('version', None)
class AzureOpenAIDeploymentDto(msrest.serialization.Model):
"""AzureOpenAIDeploymentDto.
:ivar name:
:vartype name: str
:ivar model_name:
:vartype model_name: str
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'model_name': {'key': 'modelName', 'type': 'str'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword model_name:
:paramtype model_name: str
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
"""
super(AzureOpenAIDeploymentDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.model_name = kwargs.get('model_name', None)
self.capabilities = kwargs.get('capabilities', None)
class AzureOpenAIModelCapabilities(msrest.serialization.Model):
"""AzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'completion', 'type': 'bool'},
'chat_completion': {'key': 'chat_completion', 'type': 'bool'},
'embeddings': {'key': 'embeddings', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(AzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = kwargs.get('completion', None)
self.chat_completion = kwargs.get('chat_completion', None)
self.embeddings = kwargs.get('embeddings', None)
class BatchAiComputeInfo(msrest.serialization.Model):
"""BatchAiComputeInfo.
:ivar batch_ai_subscription_id:
:vartype batch_ai_subscription_id: str
:ivar batch_ai_resource_group:
:vartype batch_ai_resource_group: str
:ivar batch_ai_workspace_name:
:vartype batch_ai_workspace_name: str
:ivar cluster_name:
:vartype cluster_name: str
:ivar native_shared_directory:
:vartype native_shared_directory: str
"""
_attribute_map = {
'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
'cluster_name': {'key': 'clusterName', 'type': 'str'},
'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword batch_ai_subscription_id:
:paramtype batch_ai_subscription_id: str
:keyword batch_ai_resource_group:
:paramtype batch_ai_resource_group: str
:keyword batch_ai_workspace_name:
:paramtype batch_ai_workspace_name: str
:keyword cluster_name:
:paramtype cluster_name: str
:keyword native_shared_directory:
:paramtype native_shared_directory: str
"""
super(BatchAiComputeInfo, self).__init__(**kwargs)
self.batch_ai_subscription_id = kwargs.get('batch_ai_subscription_id', None)
self.batch_ai_resource_group = kwargs.get('batch_ai_resource_group', None)
self.batch_ai_workspace_name = kwargs.get('batch_ai_workspace_name', None)
self.cluster_name = kwargs.get('cluster_name', None)
self.native_shared_directory = kwargs.get('native_shared_directory', None)
class BatchDataInput(msrest.serialization.Model):
"""BatchDataInput.
:ivar data_uri:
:vartype data_uri: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'data_uri': {'key': 'dataUri', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_uri:
:paramtype data_uri: str
:keyword type:
:paramtype type: str
"""
super(BatchDataInput, self).__init__(**kwargs)
self.data_uri = kwargs.get('data_uri', None)
self.type = kwargs.get('type', None)
class BatchExportComponentSpecResponse(msrest.serialization.Model):
"""BatchExportComponentSpecResponse.
:ivar component_spec_meta_infos:
:vartype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
:ivar errors:
:vartype errors: list[~flow.models.ErrorResponse]
"""
_attribute_map = {
'component_spec_meta_infos': {'key': 'componentSpecMetaInfos', 'type': '[ComponentSpecMetaInfo]'},
'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_spec_meta_infos:
:paramtype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
:keyword errors:
:paramtype errors: list[~flow.models.ErrorResponse]
"""
super(BatchExportComponentSpecResponse, self).__init__(**kwargs)
self.component_spec_meta_infos = kwargs.get('component_spec_meta_infos', None)
self.errors = kwargs.get('errors', None)
class BatchExportRawComponentResponse(msrest.serialization.Model):
"""BatchExportRawComponentResponse.
:ivar raw_component_dtos:
:vartype raw_component_dtos: list[~flow.models.RawComponentDto]
:ivar errors:
:vartype errors: list[~flow.models.ErrorResponse]
"""
_attribute_map = {
'raw_component_dtos': {'key': 'rawComponentDtos', 'type': '[RawComponentDto]'},
'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword raw_component_dtos:
:paramtype raw_component_dtos: list[~flow.models.RawComponentDto]
:keyword errors:
:paramtype errors: list[~flow.models.ErrorResponse]
"""
super(BatchExportRawComponentResponse, self).__init__(**kwargs)
self.raw_component_dtos = kwargs.get('raw_component_dtos', None)
self.errors = kwargs.get('errors', None)
class BatchGetComponentHashesRequest(msrest.serialization.Model):
"""BatchGetComponentHashesRequest.
:ivar module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
:vartype module_hash_version: str or ~flow.models.AetherModuleHashVersion
:ivar module_entities: Dictionary of :code:`<AetherModuleEntity>`.
:vartype module_entities: dict[str, ~flow.models.AetherModuleEntity]
"""
_attribute_map = {
'module_hash_version': {'key': 'moduleHashVersion', 'type': 'str'},
'module_entities': {'key': 'moduleEntities', 'type': '{AetherModuleEntity}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
:paramtype module_hash_version: str or ~flow.models.AetherModuleHashVersion
:keyword module_entities: Dictionary of :code:`<AetherModuleEntity>`.
:paramtype module_entities: dict[str, ~flow.models.AetherModuleEntity]
"""
super(BatchGetComponentHashesRequest, self).__init__(**kwargs)
self.module_hash_version = kwargs.get('module_hash_version', None)
self.module_entities = kwargs.get('module_entities', None)
class BatchGetComponentRequest(msrest.serialization.Model):
"""BatchGetComponentRequest.
:ivar version_ids:
:vartype version_ids: list[str]
:ivar name_and_versions:
:vartype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
"""
_attribute_map = {
'version_ids': {'key': 'versionIds', 'type': '[str]'},
'name_and_versions': {'key': 'nameAndVersions', 'type': '[ComponentNameMetaInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword version_ids:
:paramtype version_ids: list[str]
:keyword name_and_versions:
:paramtype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
"""
super(BatchGetComponentRequest, self).__init__(**kwargs)
self.version_ids = kwargs.get('version_ids', None)
        self.name_and_versions = kwargs.get('name_and_versions', None)


class Binding(msrest.serialization.Model):
"""Binding.
:ivar binding_type: The only acceptable values to pass in are None and "Basic". The default
value is None.
:vartype binding_type: str
"""
_attribute_map = {
'binding_type': {'key': 'bindingType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword binding_type: The only acceptable values to pass in are None and "Basic". The default
value is None.
:paramtype binding_type: str
"""
super(Binding, self).__init__(**kwargs)
self.binding_type = kwargs.get('binding_type', None)
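

# Illustrative example: per the docstring, only None and "Basic" are
# acceptable for ``binding_type``; omitting the keyword leaves the default
# None in place.
def _example_build_binding():
    return Binding(binding_type="Basic")

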
class BulkTestDto(msrest.serialization.Model):
"""BulkTestDto.
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar runtime:
:vartype runtime: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar evaluation_count:
:vartype evaluation_count: int
:ivar variant_count:
:vartype variant_count: int
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
"""
_attribute_map = {
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'runtime': {'key': 'runtime', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'evaluation_count': {'key': 'evaluationCount', 'type': 'int'},
'variant_count': {'key': 'variantCount', 'type': 'int'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
}
def __init__(
self,
**kwargs
):
"""
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword runtime:
:paramtype runtime: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword evaluation_count:
:paramtype evaluation_count: int
:keyword variant_count:
:paramtype variant_count: int
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
"""
super(BulkTestDto, self).__init__(**kwargs)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.runtime = kwargs.get('runtime', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.evaluation_count = kwargs.get('evaluation_count', None)
self.variant_count = kwargs.get('variant_count', None)
self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
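

# Illustrative example: nesting models and plain dictionaries. All values are
# hypothetical placeholders; ``batch_inputs`` is a list of free-form
# dictionaries, while ``batch_data_input`` wraps a data URI in the
# BatchDataInput model defined above.
def _example_build_bulk_test_dto():
    return BulkTestDto(
        display_name="sample bulk test",
        tags={"team": "demo"},
        batch_inputs=[{"question": "What is the capital of France?"}],
        batch_data_input=BatchDataInput(data_uri="azureml://paths/input.jsonl"),
    )

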
class CloudError(msrest.serialization.Model):
"""CloudError.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
:ivar details:
:vartype details: list[~flow.models.CloudError]
:ivar additional_info:
:vartype additional_info: list[~flow.models.AdditionalErrorInfo]
"""
_validation = {
'details': {'readonly': True},
'additional_info': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudError]'},
'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
"""
super(CloudError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = None
self.additional_info = None
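

# Illustrative example: ``details`` and ``additional_info`` are marked
# readonly in ``_validation``, so the constructor leaves them as None; they
# are only populated when deserializing a service response.
def _example_build_cloud_error():
    error = CloudError(code="NotFound", message="Resource was not found.")
    assert error.details is None and error.additional_info is None
    return error

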
class CloudPrioritySetting(msrest.serialization.Model):
"""CloudPrioritySetting.
:ivar scope_priority:
:vartype scope_priority: ~flow.models.PriorityConfiguration
:ivar aml_compute_priority:
:vartype aml_compute_priority: ~flow.models.PriorityConfiguration
:ivar itp_priority:
:vartype itp_priority: ~flow.models.PriorityConfiguration
:ivar singularity_priority:
:vartype singularity_priority: ~flow.models.PriorityConfiguration
"""
_attribute_map = {
'scope_priority': {'key': 'scopePriority', 'type': 'PriorityConfiguration'},
'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'PriorityConfiguration'},
'itp_priority': {'key': 'ItpPriority', 'type': 'PriorityConfiguration'},
'singularity_priority': {'key': 'SingularityPriority', 'type': 'PriorityConfiguration'},
}
def __init__(
self,
**kwargs
):
"""
:keyword scope_priority:
:paramtype scope_priority: ~flow.models.PriorityConfiguration
:keyword aml_compute_priority:
:paramtype aml_compute_priority: ~flow.models.PriorityConfiguration
:keyword itp_priority:
:paramtype itp_priority: ~flow.models.PriorityConfiguration
:keyword singularity_priority:
:paramtype singularity_priority: ~flow.models.PriorityConfiguration
"""
super(CloudPrioritySetting, self).__init__(**kwargs)
self.scope_priority = kwargs.get('scope_priority', None)
self.aml_compute_priority = kwargs.get('aml_compute_priority', None)
self.itp_priority = kwargs.get('itp_priority', None)
        self.singularity_priority = kwargs.get('singularity_priority', None)


class CloudSettings(msrest.serialization.Model):
"""CloudSettings.
:ivar linked_settings:
:vartype linked_settings: list[~flow.models.ParameterAssignment]
:ivar priority_config:
:vartype priority_config: ~flow.models.PriorityConfiguration
:ivar hdi_run_config:
:vartype hdi_run_config: ~flow.models.HdiRunConfiguration
:ivar sub_graph_config:
:vartype sub_graph_config: ~flow.models.SubGraphConfiguration
:ivar auto_ml_component_config:
:vartype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
:ivar ap_cloud_config:
:vartype ap_cloud_config: ~flow.models.APCloudConfiguration
:ivar scope_cloud_config:
:vartype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
:ivar es_cloud_config:
:vartype es_cloud_config: ~flow.models.EsCloudConfiguration
:ivar data_transfer_cloud_config:
:vartype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
:ivar aml_spark_cloud_setting:
:vartype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
:ivar data_transfer_v2_cloud_setting:
:vartype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
"""
_attribute_map = {
'linked_settings': {'key': 'linkedSettings', 'type': '[ParameterAssignment]'},
'priority_config': {'key': 'priorityConfig', 'type': 'PriorityConfiguration'},
'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'HdiRunConfiguration'},
'sub_graph_config': {'key': 'subGraphConfig', 'type': 'SubGraphConfiguration'},
'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AutoMLComponentConfiguration'},
'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'APCloudConfiguration'},
'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'ScopeCloudConfiguration'},
'es_cloud_config': {'key': 'esCloudConfig', 'type': 'EsCloudConfiguration'},
'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'DataTransferCloudConfiguration'},
'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AmlSparkCloudSetting'},
'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'DataTransferV2CloudSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword linked_settings:
:paramtype linked_settings: list[~flow.models.ParameterAssignment]
:keyword priority_config:
:paramtype priority_config: ~flow.models.PriorityConfiguration
:keyword hdi_run_config:
:paramtype hdi_run_config: ~flow.models.HdiRunConfiguration
:keyword sub_graph_config:
:paramtype sub_graph_config: ~flow.models.SubGraphConfiguration
:keyword auto_ml_component_config:
:paramtype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
:keyword ap_cloud_config:
:paramtype ap_cloud_config: ~flow.models.APCloudConfiguration
:keyword scope_cloud_config:
:paramtype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
:keyword es_cloud_config:
:paramtype es_cloud_config: ~flow.models.EsCloudConfiguration
:keyword data_transfer_cloud_config:
:paramtype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
:keyword aml_spark_cloud_setting:
:paramtype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
:keyword data_transfer_v2_cloud_setting:
:paramtype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
"""
super(CloudSettings, self).__init__(**kwargs)
self.linked_settings = kwargs.get('linked_settings', None)
self.priority_config = kwargs.get('priority_config', None)
self.hdi_run_config = kwargs.get('hdi_run_config', None)
self.sub_graph_config = kwargs.get('sub_graph_config', None)
self.auto_ml_component_config = kwargs.get('auto_ml_component_config', None)
self.ap_cloud_config = kwargs.get('ap_cloud_config', None)
self.scope_cloud_config = kwargs.get('scope_cloud_config', None)
self.es_cloud_config = kwargs.get('es_cloud_config', None)
self.data_transfer_cloud_config = kwargs.get('data_transfer_cloud_config', None)
self.aml_spark_cloud_setting = kwargs.get('aml_spark_cloud_setting', None)
        self.data_transfer_v2_cloud_setting = kwargs.get('data_transfer_v2_cloud_setting', None)


class ColumnTransformer(msrest.serialization.Model):
"""ColumnTransformer.
:ivar fields:
:vartype fields: list[str]
:ivar parameters: Anything.
:vartype parameters: any
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '[str]'},
'parameters': {'key': 'parameters', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword fields:
:paramtype fields: list[str]
:keyword parameters: Anything.
:paramtype parameters: any
"""
super(ColumnTransformer, self).__init__(**kwargs)
self.fields = kwargs.get('fields', None)
self.parameters = kwargs.get('parameters', None)
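

# Illustrative example: ``parameters`` is declared as ``object`` in the
# attribute map, so any JSON-serializable value passes through unchanged.
# The field names and transformer parameters are hypothetical.
def _example_build_column_transformer():
    return ColumnTransformer(
        fields=["age", "income"],
        parameters={"strategy": "mean"},
    )

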
class CommandJob(msrest.serialization.Model):
"""CommandJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar code_id:
:vartype code_id: str
:ivar command:
:vartype command: str
:ivar environment_id:
:vartype environment_id: str
:ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar distribution:
:vartype distribution: ~flow.models.DistributionConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar autologger_settings:
:vartype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
:ivar limits:
:vartype limits: ~flow.models.CommandJobLimits
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_validation = {
'command': {'min_length': 1},
}
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'code_id': {'key': 'codeId', 'type': 'str'},
'command': {'key': 'command', 'type': 'str'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'autologger_settings': {'key': 'autologgerSettings', 'type': 'MfeInternalAutologgerSettings'},
'limits': {'key': 'limits', 'type': 'CommandJobLimits'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword code_id:
:paramtype code_id: str
:keyword command:
:paramtype command: str
:keyword environment_id:
:paramtype environment_id: str
:keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword distribution:
:paramtype distribution: ~flow.models.DistributionConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword autologger_settings:
:paramtype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
:keyword limits:
:paramtype limits: ~flow.models.CommandJobLimits
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(CommandJob, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.code_id = kwargs.get('code_id', None)
self.command = kwargs.get('command', None)
self.environment_id = kwargs.get('environment_id', None)
self.input_data_bindings = kwargs.get('input_data_bindings', None)
self.output_data_bindings = kwargs.get('output_data_bindings', None)
self.distribution = kwargs.get('distribution', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.parameters = kwargs.get('parameters', None)
self.autologger_settings = kwargs.get('autologger_settings', None)
self.limits = kwargs.get('limits', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.parent_job_name = kwargs.get('parent_job_name', None)
self.display_name = kwargs.get('display_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.status = kwargs.get('status', None)
self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
self.identity = kwargs.get('identity', None)
self.compute = kwargs.get('compute', None)
self.priority = kwargs.get('priority', None)
self.output = kwargs.get('output', None)
self.is_archived = kwargs.get('is_archived', None)
self.schedule = kwargs.get('schedule', None)
self.component_id = kwargs.get('component_id', None)
self.notification_setting = kwargs.get('notification_setting', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
        self.properties = kwargs.get('properties', None)


class CommandJobLimits(msrest.serialization.Model):
"""CommandJobLimits.
:ivar job_limits_type: Possible values include: "Command", "Sweep".
:vartype job_limits_type: str or ~flow.models.JobLimitsType
:ivar timeout:
:vartype timeout: str
"""
_attribute_map = {
'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'},
'timeout': {'key': 'timeout', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_limits_type: Possible values include: "Command", "Sweep".
:paramtype job_limits_type: str or ~flow.models.JobLimitsType
:keyword timeout:
:paramtype timeout: str
"""
super(CommandJobLimits, self).__init__(**kwargs)
self.job_limits_type = kwargs.get('job_limits_type', None)
self.timeout = kwargs.get('timeout', None)
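

# Illustrative example: ``timeout`` is carried as a plain string; an ISO 8601
# duration such as "PT2H" is shown as a plausible (unverified) format.
def _example_build_command_job_limits():
    return CommandJobLimits(job_limits_type="Command", timeout="PT2H")

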
class CommandReturnCodeConfig(msrest.serialization.Model):
"""CommandReturnCodeConfig.
:ivar return_code: Possible values include: "Zero", "ZeroOrGreater".
:vartype return_code: str or ~flow.models.SuccessfulCommandReturnCode
:ivar successful_return_codes:
:vartype successful_return_codes: list[int]
"""
_attribute_map = {
'return_code': {'key': 'returnCode', 'type': 'str'},
'successful_return_codes': {'key': 'successfulReturnCodes', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword return_code: Possible values include: "Zero", "ZeroOrGreater".
:paramtype return_code: str or ~flow.models.SuccessfulCommandReturnCode
:keyword successful_return_codes:
:paramtype successful_return_codes: list[int]
"""
super(CommandReturnCodeConfig, self).__init__(**kwargs)
self.return_code = kwargs.get('return_code', None)
self.successful_return_codes = kwargs.get('successful_return_codes', None)
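

# Illustrative example: either use the coarse ``return_code`` policy string
# or enumerate the exact exit codes that count as success; the values below
# are hypothetical.
def _example_build_command_return_code_config():
    return CommandReturnCodeConfig(
        return_code="Zero",
        successful_return_codes=[0],
    )

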
class ComponentConfiguration(msrest.serialization.Model):
"""ComponentConfiguration.
:ivar component_identifier:
:vartype component_identifier: str
"""
_attribute_map = {
'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_identifier:
:paramtype component_identifier: str
"""
super(ComponentConfiguration, self).__init__(**kwargs)
        self.component_identifier = kwargs.get('component_identifier', None)


class ComponentInput(msrest.serialization.Model):
"""ComponentInput.
:ivar name:
:vartype name: str
:ivar optional:
:vartype optional: bool
:ivar description:
:vartype description: str
:ivar type:
:vartype type: str
:ivar default:
:vartype default: str
:ivar enum:
:vartype enum: list[str]
:ivar min:
:vartype min: str
:ivar max:
:vartype max: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'optional': {'key': 'optional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default': {'key': 'default', 'type': 'str'},
'enum': {'key': 'enum', 'type': '[str]'},
'min': {'key': 'min', 'type': 'str'},
'max': {'key': 'max', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword optional:
:paramtype optional: bool
:keyword description:
:paramtype description: str
:keyword type:
:paramtype type: str
:keyword default:
:paramtype default: str
:keyword enum:
:paramtype enum: list[str]
:keyword min:
:paramtype min: str
:keyword max:
:paramtype max: str
"""
super(ComponentInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.optional = kwargs.get('optional', None)
self.description = kwargs.get('description', None)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.enum = kwargs.get('enum', None)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
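

# Illustrative example: note that ``default``, ``min`` and ``max`` are all
# carried as strings regardless of the declared ``type``. The concrete
# parameter values are hypothetical.
def _example_build_component_input():
    return ComponentInput(
        name="learning_rate",
        type="Float",
        optional=True,
        default="0.01",
        min="0.0",
        max="1.0",
    )

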
class ComponentJob(msrest.serialization.Model):
"""ComponentJob.
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar component_id:
:vartype component_id: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.ComponentJobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.ComponentJobOutput]
"""
_attribute_map = {
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'component_id': {'key': 'componentId', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword component_id:
:paramtype component_id: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.ComponentJobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.ComponentJobOutput]
"""
super(ComponentJob, self).__init__(**kwargs)
self.compute = kwargs.get('compute', None)
self.component_id = kwargs.get('component_id', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
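

# Illustrative example: ``inputs`` and ``outputs`` are dictionaries keyed by
# port name, using the ComponentJobInput/ComponentJobOutput models defined
# below. The component id, port name and binding expression are hypothetical.
def _example_build_component_job():
    return ComponentJob(
        component_id="azureml:my_component:1",
        inputs={
            "training_data": ComponentJobInput(input_binding="${{inputs.data}}"),
        },
    )

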
class ComponentJobInput(msrest.serialization.Model):
"""ComponentJobInput.
:ivar data:
:vartype data: ~flow.models.InputData
:ivar input_binding:
:vartype input_binding: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'InputData'},
'input_binding': {'key': 'inputBinding', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.InputData
:keyword input_binding:
:paramtype input_binding: str
"""
super(ComponentJobInput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
        self.input_binding = kwargs.get('input_binding', None)


class ComponentJobOutput(msrest.serialization.Model):
"""ComponentJobOutput.
:ivar data:
:vartype data: ~flow.models.MfeInternalOutputData
:ivar output_binding:
:vartype output_binding: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
'output_binding': {'key': 'outputBinding', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.MfeInternalOutputData
:keyword output_binding:
:paramtype output_binding: str
"""
super(ComponentJobOutput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
        self.output_binding = kwargs.get('output_binding', None)


class ComponentNameAndDefaultVersion(msrest.serialization.Model):
"""ComponentNameAndDefaultVersion.
:ivar component_name:
:vartype component_name: str
:ivar version:
:vartype version: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
"""
_attribute_map = {
'component_name': {'key': 'componentName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_name:
:paramtype component_name: str
:keyword version:
:paramtype version: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
"""
super(ComponentNameAndDefaultVersion, self).__init__(**kwargs)
self.component_name = kwargs.get('component_name', None)
self.version = kwargs.get('version', None)
self.feed_name = kwargs.get('feed_name', None)
        self.registry_name = kwargs.get('registry_name', None)


class ComponentNameMetaInfo(msrest.serialization.Model):
"""ComponentNameMetaInfo.
:ivar feed_name:
:vartype feed_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar registry_name:
:vartype registry_name: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword registry_name:
:paramtype registry_name: str
"""
super(ComponentNameMetaInfo, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
        self.registry_name = kwargs.get('registry_name', None)


class ComponentOutput(msrest.serialization.Model):
"""ComponentOutput.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword type:
:paramtype type: str
"""
super(ComponentOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
        self.type = kwargs.get('type', None)


class ComponentPreflightResult(msrest.serialization.Model):
"""ComponentPreflightResult.
:ivar error_details:
:vartype error_details: list[~flow.models.RootError]
"""
_attribute_map = {
'error_details': {'key': 'errorDetails', 'type': '[RootError]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error_details:
:paramtype error_details: list[~flow.models.RootError]
"""
super(ComponentPreflightResult, self).__init__(**kwargs)
        self.error_details = kwargs.get('error_details', None)


class ComponentSpecMetaInfo(msrest.serialization.Model):
"""ComponentSpecMetaInfo.
:ivar component_spec: Anything.
:vartype component_spec: any
:ivar component_version:
:vartype component_version: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar component_name:
:vartype component_name: str
:ivar description:
:vartype description: str
:ivar is_archived:
:vartype is_archived: bool
"""
_attribute_map = {
'component_spec': {'key': 'componentSpec', 'type': 'object'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'tags': {'key': 'tags', 'type': '{str}'},
'component_name': {'key': 'componentName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_spec: Anything.
:paramtype component_spec: any
:keyword component_version:
:paramtype component_version: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword component_name:
:paramtype component_name: str
:keyword description:
:paramtype description: str
:keyword is_archived:
:paramtype is_archived: bool
"""
super(ComponentSpecMetaInfo, self).__init__(**kwargs)
self.component_spec = kwargs.get('component_spec', None)
self.component_version = kwargs.get('component_version', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
self.properties = kwargs.get('properties', None)
self.tags = kwargs.get('tags', None)
self.component_name = kwargs.get('component_name', None)
self.description = kwargs.get('description', None)
        self.is_archived = kwargs.get('is_archived', None)


class ComponentUpdateRequest(msrest.serialization.Model):
"""ComponentUpdateRequest.
:ivar original_module_entity:
:vartype original_module_entity: ~flow.models.ModuleEntity
:ivar update_module_entity:
:vartype update_module_entity: ~flow.models.ModuleEntity
:ivar module_name:
:vartype module_name: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar overwrite_with_original_name_and_version:
:vartype overwrite_with_original_name_and_version: bool
:ivar snapshot_id:
:vartype snapshot_id: str
"""
_attribute_map = {
'original_module_entity': {'key': 'originalModuleEntity', 'type': 'ModuleEntity'},
'update_module_entity': {'key': 'updateModuleEntity', 'type': 'ModuleEntity'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'overwrite_with_original_name_and_version': {'key': 'overwriteWithOriginalNameAndVersion', 'type': 'bool'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword original_module_entity:
:paramtype original_module_entity: ~flow.models.ModuleEntity
:keyword update_module_entity:
:paramtype update_module_entity: ~flow.models.ModuleEntity
:keyword module_name:
:paramtype module_name: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword overwrite_with_original_name_and_version:
:paramtype overwrite_with_original_name_and_version: bool
:keyword snapshot_id:
:paramtype snapshot_id: str
"""
super(ComponentUpdateRequest, self).__init__(**kwargs)
self.original_module_entity = kwargs.get('original_module_entity', None)
self.update_module_entity = kwargs.get('update_module_entity', None)
self.module_name = kwargs.get('module_name', None)
self.properties = kwargs.get('properties', None)
self.overwrite_with_original_name_and_version = kwargs.get('overwrite_with_original_name_and_version', None)
        self.snapshot_id = kwargs.get('snapshot_id', None)


class ComponentValidationRequest(msrest.serialization.Model):
"""ComponentValidationRequest.
:ivar component_identifier:
:vartype component_identifier: str
:ivar compute_identity:
:vartype compute_identity: ~flow.models.ComputeIdentityDto
:ivar execution_context_dto:
:vartype execution_context_dto: ~flow.models.ExecutionContextDto
:ivar environment_definition:
:vartype environment_definition: ~flow.models.EnvironmentDefinitionDto
:ivar data_port_dtos:
:vartype data_port_dtos: list[~flow.models.DataPortDto]
"""
_attribute_map = {
'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
'compute_identity': {'key': 'computeIdentity', 'type': 'ComputeIdentityDto'},
'execution_context_dto': {'key': 'executionContextDto', 'type': 'ExecutionContextDto'},
'environment_definition': {'key': 'environmentDefinition', 'type': 'EnvironmentDefinitionDto'},
'data_port_dtos': {'key': 'dataPortDtos', 'type': '[DataPortDto]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_identifier:
:paramtype component_identifier: str
:keyword compute_identity:
:paramtype compute_identity: ~flow.models.ComputeIdentityDto
:keyword execution_context_dto:
:paramtype execution_context_dto: ~flow.models.ExecutionContextDto
:keyword environment_definition:
:paramtype environment_definition: ~flow.models.EnvironmentDefinitionDto
:keyword data_port_dtos:
:paramtype data_port_dtos: list[~flow.models.DataPortDto]
"""
super(ComponentValidationRequest, self).__init__(**kwargs)
self.component_identifier = kwargs.get('component_identifier', None)
self.compute_identity = kwargs.get('compute_identity', None)
self.execution_context_dto = kwargs.get('execution_context_dto', None)
self.environment_definition = kwargs.get('environment_definition', None)
        self.data_port_dtos = kwargs.get('data_port_dtos', None)


class ComponentValidationResponse(msrest.serialization.Model):
"""ComponentValidationResponse.
:ivar status: Possible values include: "Succeeded", "Failed".
:vartype status: str or ~flow.models.ValidationStatus
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "Succeeded", "Failed".
:paramtype status: str or ~flow.models.ValidationStatus
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
"""
super(ComponentValidationResponse, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
        self.error = kwargs.get('error', None)


class Compute(msrest.serialization.Model):
"""Compute.
:ivar target:
:vartype target: str
:ivar target_type:
:vartype target_type: str
:ivar vm_size:
:vartype vm_size: str
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar gpu_count:
:vartype gpu_count: int
:ivar priority:
:vartype priority: str
:ivar region:
:vartype region: str
:ivar arm_id:
:vartype arm_id: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'target_type': {'key': 'targetType', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'priority': {'key': 'priority', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword target_type:
:paramtype target_type: str
:keyword vm_size:
:paramtype vm_size: str
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword gpu_count:
:paramtype gpu_count: int
:keyword priority:
:paramtype priority: str
:keyword region:
:paramtype region: str
:keyword arm_id:
:paramtype arm_id: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(Compute, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.target_type = kwargs.get('target_type', None)
self.vm_size = kwargs.get('vm_size', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.gpu_count = kwargs.get('gpu_count', None)
self.priority = kwargs.get('priority', None)
self.region = kwargs.get('region', None)
self.arm_id = kwargs.get('arm_id', None)
        self.properties = kwargs.get('properties', None)


class ComputeConfiguration(msrest.serialization.Model):
"""ComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar max_instance_count:
:vartype max_instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar instance_priority:
:vartype instance_priority: str
:ivar job_priority:
:vartype job_priority: int
:ivar shm_size:
:vartype shm_size: str
:ivar docker_args:
:vartype docker_args: str
:ivar locations:
:vartype locations: list[str]
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'docker_args': {'key': 'dockerArgs', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword max_instance_count:
:paramtype max_instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_priority:
:paramtype instance_priority: str
:keyword job_priority:
:paramtype job_priority: int
:keyword shm_size:
:paramtype shm_size: str
:keyword docker_args:
:paramtype docker_args: str
:keyword locations:
:paramtype locations: list[str]
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
"""
super(ComputeConfiguration, self).__init__(**kwargs)
self.target = kwargs.get('target', None)
self.instance_count = kwargs.get('instance_count', None)
self.max_instance_count = kwargs.get('max_instance_count', None)
self.is_local = kwargs.get('is_local', None)
self.location = kwargs.get('location', None)
self.is_clusterless = kwargs.get('is_clusterless', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_priority = kwargs.get('instance_priority', None)
self.job_priority = kwargs.get('job_priority', None)
self.shm_size = kwargs.get('shm_size', None)
self.docker_args = kwargs.get('docker_args', None)
self.locations = kwargs.get('locations', None)
self.properties = kwargs.get('properties', None)
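

# Illustrative example: targeting a named cluster. The target name, instance
# count and docker arguments are hypothetical placeholders.
def _example_build_compute_configuration():
    return ComputeConfiguration(
        target="cpu-cluster",
        instance_count=2,
        docker_args="--shm-size=2g",
    )

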
class ComputeContract(msrest.serialization.Model):
"""ComputeContract.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar location:
:vartype location: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar identity:
:vartype identity: ~flow.models.ComputeIdentityContract
:ivar properties:
:vartype properties: ~flow.models.ComputeProperties
"""
_validation = {
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'ComputeIdentityContract'},
'properties': {'key': 'properties', 'type': 'ComputeProperties'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword location:
:paramtype location: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword identity:
:paramtype identity: ~flow.models.ComputeIdentityContract
:keyword properties:
:paramtype properties: ~flow.models.ComputeProperties
"""
super(ComputeContract, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.identity = kwargs.get('identity', None)
        self.properties = kwargs.get('properties', None)


class ComputeIdentityContract(msrest.serialization.Model):
"""ComputeIdentityContract.
:ivar type:
:vartype type: str
:ivar system_identity_url:
:vartype system_identity_url: str
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar client_id:
:vartype client_id: str
:ivar client_secret_url:
:vartype client_secret_url: str
:ivar user_assigned_identities: This is a dictionary.
:vartype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'system_identity_url': {'key': 'systemIdentityUrl', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComputeRPUserAssignedIdentity}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword system_identity_url:
:paramtype system_identity_url: str
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword client_id:
:paramtype client_id: str
:keyword client_secret_url:
:paramtype client_secret_url: str
:keyword user_assigned_identities: This is a dictionary.
:paramtype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
"""
super(ComputeIdentityContract, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.system_identity_url = kwargs.get('system_identity_url', None)
self.principal_id = kwargs.get('principal_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
self.client_id = kwargs.get('client_id', None)
self.client_secret_url = kwargs.get('client_secret_url', None)
        self.user_assigned_identities = kwargs.get('user_assigned_identities', None)


class ComputeIdentityDto(msrest.serialization.Model):
"""ComputeIdentityDto.
:ivar compute_name:
:vartype compute_name: str
:ivar compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
"ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
"Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
:vartype compute_target_type: str or ~flow.models.ComputeTargetType
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'compute_name': {'key': 'computeName', 'type': 'str'},
'compute_target_type': {'key': 'computeTargetType', 'type': 'str'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_name:
:paramtype compute_name: str
:keyword compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
"ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
"Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
:paramtype compute_target_type: str or ~flow.models.ComputeTargetType
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(ComputeIdentityDto, self).__init__(**kwargs)
self.compute_name = kwargs.get('compute_name', None)
self.compute_target_type = kwargs.get('compute_target_type', None)
        self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)


class ComputeInfo(msrest.serialization.Model):
"""ComputeInfo.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar is_ssl_enabled:
:vartype is_ssl_enabled: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar cluster_purpose:
:vartype cluster_purpose: str
:ivar public_ip_address:
:vartype public_ip_address: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword is_ssl_enabled:
:paramtype is_ssl_enabled: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword cluster_purpose:
:paramtype cluster_purpose: str
:keyword public_ip_address:
:paramtype public_ip_address: str
"""
super(ComputeInfo, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_type = kwargs.get('compute_type', None)
self.is_ssl_enabled = kwargs.get('is_ssl_enabled', None)
self.is_gpu_type = kwargs.get('is_gpu_type', None)
self.cluster_purpose = kwargs.get('cluster_purpose', None)
        self.public_ip_address = kwargs.get('public_ip_address', None)


class ComputeProperties(msrest.serialization.Model):
"""ComputeProperties.
All required parameters must be populated in order to send to Azure.
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar disable_local_auth:
:vartype disable_local_auth: bool
:ivar description:
:vartype description: str
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type: Required.
:vartype compute_type: str
:ivar compute_location:
:vartype compute_location: str
:ivar provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
"Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or ~flow.models.ProvisioningState
:ivar provisioning_errors:
:vartype provisioning_errors: list[~flow.models.ODataErrorResponse]
:ivar provisioning_warnings: This is a dictionary.
:vartype provisioning_warnings: dict[str, str]
:ivar is_attached_compute:
:vartype is_attached_compute: bool
:ivar properties: Any object.
:vartype properties: any
:ivar status:
:vartype status: ~flow.models.ComputeStatus
:ivar warnings:
:vartype warnings: list[~flow.models.ComputeWarning]
"""
_validation = {
'compute_type': {'required': True, 'min_length': 1},
}
_attribute_map = {
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'compute_location': {'key': 'computeLocation', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ODataErrorResponse]'},
'provisioning_warnings': {'key': 'provisioningWarnings', 'type': '{str}'},
'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
'properties': {'key': 'properties', 'type': 'object'},
'status': {'key': 'status', 'type': 'ComputeStatus'},
'warnings': {'key': 'warnings', 'type': '[ComputeWarning]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword disable_local_auth:
:paramtype disable_local_auth: bool
:keyword description:
:paramtype description: str
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type: Required.
:paramtype compute_type: str
:keyword compute_location:
:paramtype compute_location: str
:keyword provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
"Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
:paramtype provisioning_state: str or ~flow.models.ProvisioningState
:keyword provisioning_errors:
:paramtype provisioning_errors: list[~flow.models.ODataErrorResponse]
:keyword provisioning_warnings: This is a dictionary.
:paramtype provisioning_warnings: dict[str, str]
:keyword is_attached_compute:
:paramtype is_attached_compute: bool
:keyword properties: Any object.
:paramtype properties: any
:keyword status:
:paramtype status: ~flow.models.ComputeStatus
:keyword warnings:
:paramtype warnings: list[~flow.models.ComputeWarning]
"""
super(ComputeProperties, self).__init__(**kwargs)
self.created_on = kwargs.get('created_on', None)
self.modified_on = kwargs.get('modified_on', None)
self.disable_local_auth = kwargs.get('disable_local_auth', None)
self.description = kwargs.get('description', None)
self.resource_id = kwargs.get('resource_id', None)
self.compute_type = kwargs['compute_type']
self.compute_location = kwargs.get('compute_location', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.provisioning_errors = kwargs.get('provisioning_errors', None)
self.provisioning_warnings = kwargs.get('provisioning_warnings', None)
self.is_attached_compute = kwargs.get('is_attached_compute', None)
self.properties = kwargs.get('properties', None)
self.status = kwargs.get('status', None)
self.warnings = kwargs.get('warnings', None)
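

# Illustrative example: ``compute_type`` is the one required field; the
# constructor reads it with ``kwargs['compute_type']``, so omitting it raises
# KeyError at construction time. The compute type string is hypothetical.
def _example_build_compute_properties():
    return ComputeProperties(
        compute_type="AmlCompute",
        disable_local_auth=True,
    )

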
class ComputeRequest(msrest.serialization.Model):
"""ComputeRequest.
:ivar node_count:
:vartype node_count: int
:ivar gpu_count:
:vartype gpu_count: int
"""
_attribute_map = {
'node_count': {'key': 'nodeCount', 'type': 'int'},
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_count:
:paramtype node_count: int
:keyword gpu_count:
:paramtype gpu_count: int
"""
super(ComputeRequest, self).__init__(**kwargs)
self.node_count = kwargs.get('node_count', None)
self.gpu_count = kwargs.get('gpu_count', None)
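

# Illustrative example: msrest models serialize to the wire casing declared
# in ``_attribute_map``. The counts below are hypothetical.
def _example_serialize_compute_request():
    request = ComputeRequest(node_count=2, gpu_count=1)
    # ``serialize`` emits the REST keys, e.g. {'nodeCount': 2, 'gpuCount': 1};
    # fields left as None are omitted from the payload.
    return request.serialize()

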
class ComputeRPUserAssignedIdentity(msrest.serialization.Model):
"""ComputeRPUserAssignedIdentity.
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar client_id:
:vartype client_id: str
:ivar client_secret_url:
:vartype client_secret_url: str
:ivar resource_id:
:vartype resource_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword client_id:
:paramtype client_id: str
:keyword client_secret_url:
:paramtype client_secret_url: str
:keyword resource_id:
:paramtype resource_id: str
"""
super(ComputeRPUserAssignedIdentity, self).__init__(**kwargs)
self.principal_id = kwargs.get('principal_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
self.client_id = kwargs.get('client_id', None)
self.client_secret_url = kwargs.get('client_secret_url', None)
        self.resource_id = kwargs.get('resource_id', None)


class ComputeSetting(msrest.serialization.Model):
"""ComputeSetting.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:vartype compute_type: str or ~flow.models.ComputeType
:ivar batch_ai_compute_info:
:vartype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
:ivar remote_docker_compute_info:
:vartype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
:ivar hdi_cluster_compute_info:
:vartype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
:ivar mlc_compute_info:
:vartype mlc_compute_info: ~flow.models.MlcComputeInfo
:ivar databricks_compute_info:
:vartype databricks_compute_info: ~flow.models.DatabricksComputeInfo
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'BatchAiComputeInfo'},
'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'RemoteDockerComputeInfo'},
'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'HdiClusterComputeInfo'},
'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'MlcComputeInfo'},
'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'DatabricksComputeInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:paramtype compute_type: str or ~flow.models.ComputeType
:keyword batch_ai_compute_info:
:paramtype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
:keyword remote_docker_compute_info:
:paramtype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
:keyword hdi_cluster_compute_info:
:paramtype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
:keyword mlc_compute_info:
:paramtype mlc_compute_info: ~flow.models.MlcComputeInfo
:keyword databricks_compute_info:
:paramtype databricks_compute_info: ~flow.models.DatabricksComputeInfo
"""
super(ComputeSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_type = kwargs.get('compute_type', None)
self.batch_ai_compute_info = kwargs.get('batch_ai_compute_info', None)
self.remote_docker_compute_info = kwargs.get('remote_docker_compute_info', None)
self.hdi_cluster_compute_info = kwargs.get('hdi_cluster_compute_info', None)
self.mlc_compute_info = kwargs.get('mlc_compute_info', None)
        self.databricks_compute_info = kwargs.get('databricks_compute_info', None)


class ComputeStatus(msrest.serialization.Model):
"""ComputeStatus.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar is_status_available:
:vartype is_status_available: bool
:ivar detailed_status: Anything.
:vartype detailed_status: any
:ivar error: Represents OData v4 error object.
:vartype error: ~flow.models.ODataError
"""
_validation = {
'is_status_available': {'readonly': True},
}
_attribute_map = {
'is_status_available': {'key': 'isStatusAvailable', 'type': 'bool'},
'detailed_status': {'key': 'detailedStatus', 'type': 'object'},
'error': {'key': 'error', 'type': 'ODataError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword detailed_status: Anything.
:paramtype detailed_status: any
:keyword error: Represents OData v4 error object.
:paramtype error: ~flow.models.ODataError
"""
super(ComputeStatus, self).__init__(**kwargs)
self.is_status_available = None
self.detailed_status = kwargs.get('detailed_status', None)
self.error = kwargs.get('error', None)
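

# Illustrative sketch (not part of the generated client): is_status_available is
# declared readonly in _validation, so __init__ pins it to None and serialize()
# drops it from the payload by default; only server responses populate it. The
# detailed_status payload below is invented.
def _example_compute_status():
    status = ComputeStatus(detailed_status={"phase": "Starting"})
    assert status.is_status_available is None  # client code cannot set this
    return status.serialize()  # readonly fields are omitted unless keep_readonly=True

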
class ComputeStatusDetail(msrest.serialization.Model):
"""ComputeStatusDetail.
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar provisioning_error_message:
:vartype provisioning_error_message: str
"""
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'provisioning_error_message': {'key': 'provisioningErrorMessage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword provisioning_error_message:
:paramtype provisioning_error_message: str
"""
super(ComputeStatusDetail, self).__init__(**kwargs)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.provisioning_error_message = kwargs.get('provisioning_error_message', None)


class ComputeWarning(msrest.serialization.Model):
"""ComputeWarning.
:ivar title:
:vartype title: str
:ivar message:
:vartype message: str
:ivar code:
:vartype code: str
:ivar severity: Possible values include: "Critical", "Error", "Warning", "Info".
:vartype severity: str or ~flow.models.SeverityLevel
"""
_attribute_map = {
'title': {'key': 'title', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword title:
:paramtype title: str
:keyword message:
:paramtype message: str
:keyword code:
:paramtype code: str
:keyword severity: Possible values include: "Critical", "Error", "Warning", "Info".
:paramtype severity: str or ~flow.models.SeverityLevel
"""
super(ComputeWarning, self).__init__(**kwargs)
self.title = kwargs.get('title', None)
self.message = kwargs.get('message', None)
self.code = kwargs.get('code', None)
self.severity = kwargs.get('severity', None)


class ConnectionConfigSpec(msrest.serialization.Model):
"""ConnectionConfigSpec.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar config_value_type: Possible values include: "String", "Secret".
:vartype config_value_type: str or ~flow.models.ConfigValueType
:ivar description:
:vartype description: str
:ivar default_value:
:vartype default_value: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'config_value_type': {'key': 'configValueType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword config_value_type: Possible values include: "String", "Secret".
:paramtype config_value_type: str or ~flow.models.ConfigValueType
:keyword description:
:paramtype description: str
:keyword default_value:
:paramtype default_value: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword is_optional:
:paramtype is_optional: bool
"""
super(ConnectionConfigSpec, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.config_value_type = kwargs.get('config_value_type', None)
self.description = kwargs.get('description', None)
self.default_value = kwargs.get('default_value', None)
self.enum_values = kwargs.get('enum_values', None)
self.is_optional = kwargs.get('is_optional', None)


class ConnectionDto(msrest.serialization.Model):
"""ConnectionDto.
:ivar connection_name:
:vartype connection_name: str
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'connection_name': {'key': 'connectionName', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_name:
:paramtype connection_name: str
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ConnectionDto, self).__init__(**kwargs)
self.connection_name = kwargs.get('connection_name', None)
self.connection_type = kwargs.get('connection_type', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
self.owner = kwargs.get('owner', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
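

# Illustrative sketch (not part of the generated client): rehydrating a
# ConnectionDto from a service-style payload. All field values are invented;
# deserialize() (inherited from msrest.serialization.Model) applies
# _attribute_map in reverse and parses iso-8601 strings into datetimes.
def _example_connection_dto():
    payload = {
        "connectionName": "my-aoai-connection",
        "connectionType": "AzureOpenAI",
        "configs": {"api_base": "https://example.openai.azure.com"},
        "createdDate": "2023-01-01T00:00:00.000Z",
    }
    return ConnectionDto.deserialize(payload)

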
class ConnectionEntity(msrest.serialization.Model):
"""ConnectionEntity.
:ivar connection_id:
:vartype connection_id: str
:ivar connection_name:
:vartype connection_name: str
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_scope: Possible values include: "User", "WorkspaceShared".
:vartype connection_scope: str or ~flow.models.ConnectionScope
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
:ivar secret_name:
:vartype secret_name: str
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'connection_id': {'key': 'connectionId', 'type': 'str'},
'connection_name': {'key': 'connectionName', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_scope': {'key': 'connectionScope', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'secret_name': {'key': 'secretName', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_id:
:paramtype connection_id: str
:keyword connection_name:
:paramtype connection_name: str
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_scope: Possible values include: "User", "WorkspaceShared".
:paramtype connection_scope: str or ~flow.models.ConnectionScope
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
:keyword secret_name:
:paramtype secret_name: str
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ConnectionEntity, self).__init__(**kwargs)
self.connection_id = kwargs.get('connection_id', None)
self.connection_name = kwargs.get('connection_name', None)
self.connection_type = kwargs.get('connection_type', None)
self.connection_scope = kwargs.get('connection_scope', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
self.secret_name = kwargs.get('secret_name', None)
self.owner = kwargs.get('owner', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)


class ConnectionOverrideSetting(msrest.serialization.Model):
"""ConnectionOverrideSetting.
:ivar connection_source_type: Possible values include: "Node", "NodeInput".
:vartype connection_source_type: str or ~flow.models.ConnectionSourceType
:ivar node_name:
:vartype node_name: str
:ivar node_input_name:
:vartype node_input_name: str
:ivar node_deployment_name_input:
:vartype node_deployment_name_input: str
:ivar node_model_input:
:vartype node_model_input: str
:ivar connection_name:
:vartype connection_name: str
:ivar deployment_name:
:vartype deployment_name: str
:ivar model:
:vartype model: str
:ivar connection_types:
:vartype connection_types: list[str or ~flow.models.ConnectionType]
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar model_enum:
:vartype model_enum: list[str]
"""
_attribute_map = {
'connection_source_type': {'key': 'connectionSourceType', 'type': 'str'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'node_input_name': {'key': 'nodeInputName', 'type': 'str'},
'node_deployment_name_input': {'key': 'nodeDeploymentNameInput', 'type': 'str'},
'node_model_input': {'key': 'nodeModelInput', 'type': 'str'},
'connection_name': {'key': 'connectionName', 'type': 'str'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'model': {'key': 'model', 'type': 'str'},
'connection_types': {'key': 'connectionTypes', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'model_enum': {'key': 'modelEnum', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_source_type: Possible values include: "Node", "NodeInput".
:paramtype connection_source_type: str or ~flow.models.ConnectionSourceType
:keyword node_name:
:paramtype node_name: str
:keyword node_input_name:
:paramtype node_input_name: str
:keyword node_deployment_name_input:
:paramtype node_deployment_name_input: str
:keyword node_model_input:
:paramtype node_model_input: str
:keyword connection_name:
:paramtype connection_name: str
:keyword deployment_name:
:paramtype deployment_name: str
:keyword model:
:paramtype model: str
:keyword connection_types:
:paramtype connection_types: list[str or ~flow.models.ConnectionType]
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword model_enum:
:paramtype model_enum: list[str]
"""
super(ConnectionOverrideSetting, self).__init__(**kwargs)
self.connection_source_type = kwargs.get('connection_source_type', None)
self.node_name = kwargs.get('node_name', None)
self.node_input_name = kwargs.get('node_input_name', None)
self.node_deployment_name_input = kwargs.get('node_deployment_name_input', None)
self.node_model_input = kwargs.get('node_model_input', None)
self.connection_name = kwargs.get('connection_name', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.model = kwargs.get('model', None)
self.connection_types = kwargs.get('connection_types', None)
self.capabilities = kwargs.get('capabilities', None)
self.model_enum = kwargs.get('model_enum', None)
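

# Illustrative sketch (not part of the generated client): an override that
# points the "connection" input of a node at a named workspace connection. The
# node, input, connection, and deployment names are all invented.
def _example_connection_override():
    return ConnectionOverrideSetting(
        connection_source_type="NodeInput",
        node_name="chat_node",
        node_input_name="connection",
        connection_name="my-aoai-connection",
        deployment_name="gpt-35-turbo",
    )

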
class ConnectionSpec(msrest.serialization.Model):
"""ConnectionSpec.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar config_specs:
:vartype config_specs: list[~flow.models.ConnectionConfigSpec]
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword config_specs:
:paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
"""
super(ConnectionSpec, self).__init__(**kwargs)
self.connection_type = kwargs.get('connection_type', None)
self.config_specs = kwargs.get('config_specs', None)


class ContainerInstanceConfiguration(msrest.serialization.Model):
"""ContainerInstanceConfiguration.
:ivar region:
:vartype region: str
:ivar cpu_cores:
:vartype cpu_cores: float
:ivar memory_gb:
:vartype memory_gb: float
"""
_attribute_map = {
'region': {'key': 'region', 'type': 'str'},
'cpu_cores': {'key': 'cpuCores', 'type': 'float'},
'memory_gb': {'key': 'memoryGb', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword region:
:paramtype region: str
:keyword cpu_cores:
:paramtype cpu_cores: float
:keyword memory_gb:
:paramtype memory_gb: float
"""
super(ContainerInstanceConfiguration, self).__init__(**kwargs)
self.region = kwargs.get('region', None)
self.cpu_cores = kwargs.get('cpu_cores', None)
self.memory_gb = kwargs.get('memory_gb', None)


class ContainerRegistry(msrest.serialization.Model):
"""ContainerRegistry.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar credential_type:
:vartype credential_type: str
:ivar registry_identity:
:vartype registry_identity: ~flow.models.RegistryIdentity
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'credential_type': {'key': 'credentialType', 'type': 'str'},
'registry_identity': {'key': 'registryIdentity', 'type': 'RegistryIdentity'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword credential_type:
:paramtype credential_type: str
:keyword registry_identity:
:paramtype registry_identity: ~flow.models.RegistryIdentity
"""
super(ContainerRegistry, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.credential_type = kwargs.get('credential_type', None)
self.registry_identity = kwargs.get('registry_identity', None)


class ContainerResourceRequirements(msrest.serialization.Model):
"""ContainerResourceRequirements.
:ivar cpu:
:vartype cpu: float
:ivar cpu_limit:
:vartype cpu_limit: float
:ivar memory_in_gb:
:vartype memory_in_gb: float
:ivar memory_in_gb_limit:
:vartype memory_in_gb_limit: float
:ivar gpu_enabled:
:vartype gpu_enabled: bool
:ivar gpu:
:vartype gpu: int
:ivar fpga:
:vartype fpga: int
"""
_attribute_map = {
'cpu': {'key': 'cpu', 'type': 'float'},
'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
'gpu_enabled': {'key': 'gpuEnabled', 'type': 'bool'},
'gpu': {'key': 'gpu', 'type': 'int'},
'fpga': {'key': 'fpga', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cpu:
:paramtype cpu: float
:keyword cpu_limit:
:paramtype cpu_limit: float
:keyword memory_in_gb:
:paramtype memory_in_gb: float
:keyword memory_in_gb_limit:
:paramtype memory_in_gb_limit: float
:keyword gpu_enabled:
:paramtype gpu_enabled: bool
:keyword gpu:
:paramtype gpu: int
:keyword fpga:
:paramtype fpga: int
"""
super(ContainerResourceRequirements, self).__init__(**kwargs)
self.cpu = kwargs.get('cpu', None)
self.cpu_limit = kwargs.get('cpu_limit', None)
self.memory_in_gb = kwargs.get('memory_in_gb', None)
self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None)
self.gpu_enabled = kwargs.get('gpu_enabled', None)
self.gpu = kwargs.get('gpu', None)
self.fpga = kwargs.get('fpga', None)
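

# Illustrative sketch (not part of the generated client): a request-and-limit
# resource shape. The numbers are arbitrary; cpu/memory_in_gb describe the
# requested amounts and the *_limit fields the ceilings, mirroring container
# resource semantics.
def _example_container_resources():
    return ContainerResourceRequirements(
        cpu=1.0,
        cpu_limit=2.0,
        memory_in_gb=4.0,
        memory_in_gb_limit=8.0,
    )

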
class ControlInput(msrest.serialization.Model):
"""ControlInput.
:ivar name:
:vartype name: str
:ivar default_value: Possible values include: "None", "False", "True", "Skipped".
:vartype default_value: str or ~flow.models.ControlInputValue
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword default_value: Possible values include: "None", "False", "True", "Skipped".
:paramtype default_value: str or ~flow.models.ControlInputValue
"""
super(ControlInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.default_value = kwargs.get('default_value', None)


class ControlOutput(msrest.serialization.Model):
"""ControlOutput.
:ivar name:
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
"""
super(ControlOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)


class CopyDataTask(msrest.serialization.Model):
"""CopyDataTask.
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.DataCopyMode
"""
_attribute_map = {
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode
"""
super(CopyDataTask, self).__init__(**kwargs)
self.data_copy_mode = kwargs.get('data_copy_mode', None)


class CreatedBy(msrest.serialization.Model):
"""CreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
"""
super(CreatedBy, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)


class CreatedFromDto(msrest.serialization.Model):
"""CreatedFromDto.
:ivar type: The only acceptable values to pass in are None and "Notebook". The default value
is None.
:vartype type: str
:ivar location_type: The only acceptable values to pass in are None and "ArtifactId". The
default value is None.
:vartype location_type: str
:ivar location:
:vartype location: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'location_type': {'key': 'locationType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: The only acceptable values to pass in are None and "Notebook". The default
value is None.
:paramtype type: str
:keyword location_type: The only acceptable values to pass in are None and "ArtifactId". The
default value is None.
:paramtype location_type: str
:keyword location:
:paramtype location: str
"""
super(CreatedFromDto, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.location_type = kwargs.get('location_type', None)
self.location = kwargs.get('location', None)


class CreateFlowFromSampleRequest(msrest.serialization.Model):
"""CreateFlowFromSampleRequest.
:ivar flow_name:
:vartype flow_name: str
:ivar sample_resource_id:
:vartype sample_resource_id: str
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar is_archived:
:vartype is_archived: bool
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword sample_resource_id:
:paramtype sample_resource_id: str
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword is_archived:
:paramtype is_archived: bool
"""
super(CreateFlowFromSampleRequest, self).__init__(**kwargs)
self.flow_name = kwargs.get('flow_name', None)
self.sample_resource_id = kwargs.get('sample_resource_id', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.tags = kwargs.get('tags', None)
self.is_archived = kwargs.get('is_archived', None)


class CreateFlowRequest(msrest.serialization.Model):
"""CreateFlowRequest.
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(CreateFlowRequest, self).__init__(**kwargs)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.details = kwargs.get('details', None)
self.tags = kwargs.get('tags', None)
self.flow = kwargs.get('flow', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
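

# Illustrative sketch (not part of the generated client): a minimal
# CreateFlowRequest body. The flow name and tags are invented; "Chat" is one of
# the FlowType values documented above and may be passed as a plain string.
def _example_create_flow_request():
    request = CreateFlowRequest(
        flow_name="my-chat-flow",
        flow_type="Chat",
        tags={"team": "demo"},
        is_archived=False,
    )
    return request.serialize()

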
class CreateFlowRuntimeRequest(msrest.serialization.Model):
"""CreateFlowRuntimeRequest.
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar deployment_name:
:vartype deployment_name: str
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar environment:
:vartype environment: str
:ivar instance_count:
:vartype instance_count: int
"""
_attribute_map = {
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword deployment_name:
:paramtype deployment_name: str
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword environment:
:paramtype environment: str
:keyword instance_count:
:paramtype instance_count: int
"""
super(CreateFlowRuntimeRequest, self).__init__(**kwargs)
self.runtime_type = kwargs.get('runtime_type', None)
self.identity = kwargs.get('identity', None)
self.instance_type = kwargs.get('instance_type', None)
self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.compute_instance_name = kwargs.get('compute_instance_name', None)
self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
self.custom_app_name = kwargs.get('custom_app_name', None)
self.runtime_description = kwargs.get('runtime_description', None)
self.environment = kwargs.get('environment', None)
self.instance_count = kwargs.get('instance_count', None)
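

# Illustrative sketch (not part of the generated client): a compute-instance
# backed runtime request. The instance and app names are invented;
# "ComputeInstance" is one of the RuntimeType values documented above.
def _example_create_flow_runtime_request():
    return CreateFlowRuntimeRequest(
        runtime_type="ComputeInstance",
        compute_instance_name="ci-demo",
        from_existing_custom_app=False,
        custom_app_name="promptflow-runtime",
        runtime_description="demo runtime",
    )

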
class CreateFlowSessionRequest(msrest.serialization.Model):
"""CreateFlowSessionRequest.
:ivar python_pip_requirements:
:vartype python_pip_requirements: list[str]
:ivar base_image:
:vartype base_image: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
:vartype action: str or ~flow.models.SetupFlowSessionAction
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
'base_image': {'key': 'baseImage', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'action': {'key': 'action', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword python_pip_requirements:
:paramtype python_pip_requirements: list[str]
:keyword base_image:
:paramtype base_image: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
:paramtype action: str or ~flow.models.SetupFlowSessionAction
:keyword identity:
:paramtype identity: str
"""
super(CreateFlowSessionRequest, self).__init__(**kwargs)
self.python_pip_requirements = kwargs.get('python_pip_requirements', None)
self.base_image = kwargs.get('base_image', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.action = kwargs.get('action', None)
self.identity = kwargs.get('identity', None)
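

# Illustrative sketch (not part of the generated client): requesting a session
# with extra pip packages installed. The package pin and VM size are
# assumptions; "Install" is one of the SetupFlowSessionAction values documented
# above.
def _example_create_flow_session_request():
    return CreateFlowSessionRequest(
        python_pip_requirements=["numpy==1.26.0"],
        vm_size="Standard_DS3_v2",
        max_idle_time_seconds=3600,
        action="Install",
    )

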
class CreateInferencePipelineRequest(msrest.serialization.Model):
"""CreateInferencePipelineRequest.
:ivar module_node_id:
:vartype module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar training_pipeline_draft_name:
:vartype training_pipeline_draft_name: str
:ivar training_pipeline_run_display_name:
:vartype training_pipeline_run_display_name: str
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'training_pipeline_draft_name': {'key': 'trainingPipelineDraftName', 'type': 'str'},
'training_pipeline_run_display_name': {'key': 'trainingPipelineRunDisplayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_node_id:
:paramtype module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword training_pipeline_draft_name:
:paramtype training_pipeline_draft_name: str
:keyword training_pipeline_run_display_name:
:paramtype training_pipeline_run_display_name: str
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreateInferencePipelineRequest, self).__init__(**kwargs)
self.module_node_id = kwargs.get('module_node_id', None)
self.port_name = kwargs.get('port_name', None)
self.training_pipeline_draft_name = kwargs.get('training_pipeline_draft_name', None)
self.training_pipeline_run_display_name = kwargs.get('training_pipeline_run_display_name', None)
self.name = kwargs.get('name', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class CreateOrUpdateConnectionRequest(msrest.serialization.Model):
"""CreateOrUpdateConnectionRequest.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_scope: Possible values include: "User", "WorkspaceShared".
:vartype connection_scope: str or ~flow.models.ConnectionScope
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_scope': {'key': 'connectionScope', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_scope: Possible values include: "User", "WorkspaceShared".
:paramtype connection_scope: str or ~flow.models.ConnectionScope
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
"""
super(CreateOrUpdateConnectionRequest, self).__init__(**kwargs)
self.connection_type = kwargs.get('connection_type', None)
self.connection_scope = kwargs.get('connection_scope', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)
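

# Illustrative sketch (not part of the generated client): an AzureOpenAI
# connection upsert. The config keys and expiry are invented; for a "Custom"
# connection, secrets would instead travel in custom_configs as
# CustomConnectionConfig values.
def _example_create_or_update_connection_request():
    import datetime  # local import keeps the sketch self-contained

    return CreateOrUpdateConnectionRequest(
        connection_type="AzureOpenAI",
        connection_scope="WorkspaceShared",
        configs={"api_base": "https://example.openai.azure.com", "api_type": "azure"},
        expiry_time=datetime.datetime(2024, 1, 1),
    )

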
class CreateOrUpdateConnectionRequestDto(msrest.serialization.Model):
"""CreateOrUpdateConnectionRequestDto.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
"""
super(CreateOrUpdateConnectionRequestDto, self).__init__(**kwargs)
self.connection_type = kwargs.get('connection_type', None)
self.configs = kwargs.get('configs', None)
self.custom_configs = kwargs.get('custom_configs', None)
self.expiry_time = kwargs.get('expiry_time', None)


class CreatePipelineDraftRequest(msrest.serialization.Model):
"""CreatePipelineDraftRequest.
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreatePipelineDraftRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class CreatePipelineJobScheduleDto(msrest.serialization.Model):
"""CreatePipelineJobScheduleDto.
:ivar name:
:vartype name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(CreatePipelineJobScheduleDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
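

# Illustrative sketch (not part of the generated client): a cron-triggered
# schedule. The names are invented, and the Cron model is defined elsewhere in
# this module, so the sketch takes the trigger as a parameter rather than
# constructing one here.
def _example_create_schedule(cron_trigger):
    return CreatePipelineJobScheduleDto(
        name="nightly-run",
        pipeline_job_name="train-pipeline-job",
        trigger_type="Cron",
        cron=cron_trigger,  # expected: a ~flow.models.Cron instance
        status="Enabled",
    )

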
class CreatePublishedPipelineRequest(msrest.serialization.Model):
"""CreatePublishedPipelineRequest.
:ivar use_pipeline_endpoint:
:vartype use_pipeline_endpoint: bool
:ivar pipeline_name:
:vartype pipeline_name: str
:ivar pipeline_description:
:vartype pipeline_description: str
:ivar use_existing_pipeline_endpoint:
:vartype use_existing_pipeline_endpoint: bool
:ivar pipeline_endpoint_name:
:vartype pipeline_endpoint_name: str
:ivar pipeline_endpoint_description:
:vartype pipeline_endpoint_description: str
:ivar set_as_default_pipeline_for_endpoint:
:vartype set_as_default_pipeline_for_endpoint: bool
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar enable_notification:
:vartype enable_notification: bool
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar display_name:
:vartype display_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'use_pipeline_endpoint': {'key': 'usePipelineEndpoint', 'type': 'bool'},
'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
'pipeline_description': {'key': 'pipelineDescription', 'type': 'str'},
'use_existing_pipeline_endpoint': {'key': 'useExistingPipelineEndpoint', 'type': 'bool'},
'pipeline_endpoint_name': {'key': 'pipelineEndpointName', 'type': 'str'},
'pipeline_endpoint_description': {'key': 'pipelineEndpointDescription', 'type': 'str'},
'set_as_default_pipeline_for_endpoint': {'key': 'setAsDefaultPipelineForEndpoint', 'type': 'bool'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_pipeline_endpoint:
:paramtype use_pipeline_endpoint: bool
:keyword pipeline_name:
:paramtype pipeline_name: str
:keyword pipeline_description:
:paramtype pipeline_description: str
:keyword use_existing_pipeline_endpoint:
:paramtype use_existing_pipeline_endpoint: bool
:keyword pipeline_endpoint_name:
:paramtype pipeline_endpoint_name: str
:keyword pipeline_endpoint_description:
:paramtype pipeline_endpoint_description: str
:keyword set_as_default_pipeline_for_endpoint:
:paramtype set_as_default_pipeline_for_endpoint: bool
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword enable_notification:
:paramtype enable_notification: bool
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword display_name:
:paramtype display_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreatePublishedPipelineRequest, self).__init__(**kwargs)
self.use_pipeline_endpoint = kwargs.get('use_pipeline_endpoint', None)
self.pipeline_name = kwargs.get('pipeline_name', None)
self.pipeline_description = kwargs.get('pipeline_description', None)
self.use_existing_pipeline_endpoint = kwargs.get('use_existing_pipeline_endpoint', None)
self.pipeline_endpoint_name = kwargs.get('pipeline_endpoint_name', None)
self.pipeline_endpoint_description = kwargs.get('pipeline_endpoint_description', None)
self.set_as_default_pipeline_for_endpoint = kwargs.get('set_as_default_pipeline_for_endpoint', None)
self.step_tags = kwargs.get('step_tags', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.enable_notification = kwargs.get('enable_notification', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.display_name = kwargs.get('display_name', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
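# Example (illustrative sketch): publishing a completed pipeline run to a new
# endpoint. The run id and names are placeholders, and which field combinations
# the service actually requires depends on the publish scenario.
#
#   publish_request = CreatePublishedPipelineRequest(
#       use_pipeline_endpoint=True,
#       pipeline_name="churn-scoring",
#       pipeline_endpoint_name="churn-scoring-endpoint",
#       set_as_default_pipeline_for_endpoint=True,
#       run_id="<pipeline-run-id>",
#       experiment_name="churn-experiments",
#       tags={"stage": "prod"},
#   )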
class CreateRealTimeEndpointRequest(msrest.serialization.Model):
"""CreateRealTimeEndpointRequest.
:ivar name:
:vartype name: str
:ivar compute_info:
:vartype compute_info: ~flow.models.ComputeInfo
:ivar description:
:vartype description: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar aks_advance_settings:
:vartype aks_advance_settings: ~flow.models.AKSAdvanceSettings
:ivar aci_advance_settings:
:vartype aci_advance_settings: ~flow.models.ACIAdvanceSettings
:ivar linked_training_pipeline_run_id:
:vartype linked_training_pipeline_run_id: str
:ivar linked_experiment_name:
:vartype linked_experiment_name: str
:ivar graph_nodes_run_id_mapping: This is a dictionary.
:vartype graph_nodes_run_id_mapping: dict[str, str]
:ivar workflow:
:vartype workflow: ~flow.models.PipelineGraph
:ivar inputs:
:vartype inputs: list[~flow.models.InputOutputPortMetadata]
:ivar outputs:
:vartype outputs: list[~flow.models.InputOutputPortMetadata]
:ivar example_request:
:vartype example_request: ~flow.models.ExampleRequest
:ivar user_storage_connection_string:
:vartype user_storage_connection_string: str
:ivar user_storage_endpoint_uri:
:vartype user_storage_endpoint_uri: str
:ivar user_storage_workspace_sai_token:
:vartype user_storage_workspace_sai_token: str
:ivar user_storage_container_name:
:vartype user_storage_container_name: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar root_pipeline_run_id:
:vartype root_pipeline_run_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_info': {'key': 'computeInfo', 'type': 'ComputeInfo'},
'description': {'key': 'description', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'aks_advance_settings': {'key': 'aksAdvanceSettings', 'type': 'AKSAdvanceSettings'},
'aci_advance_settings': {'key': 'aciAdvanceSettings', 'type': 'ACIAdvanceSettings'},
'linked_training_pipeline_run_id': {'key': 'linkedTrainingPipelineRunId', 'type': 'str'},
'linked_experiment_name': {'key': 'linkedExperimentName', 'type': 'str'},
'graph_nodes_run_id_mapping': {'key': 'graphNodesRunIdMapping', 'type': '{str}'},
'workflow': {'key': 'workflow', 'type': 'PipelineGraph'},
'inputs': {'key': 'inputs', 'type': '[InputOutputPortMetadata]'},
'outputs': {'key': 'outputs', 'type': '[InputOutputPortMetadata]'},
'example_request': {'key': 'exampleRequest', 'type': 'ExampleRequest'},
'user_storage_connection_string': {'key': 'userStorageConnectionString', 'type': 'str'},
'user_storage_endpoint_uri': {'key': 'userStorageEndpointUri', 'type': 'str'},
'user_storage_workspace_sai_token': {'key': 'userStorageWorkspaceSaiToken', 'type': 'str'},
'user_storage_container_name': {'key': 'userStorageContainerName', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_info:
:paramtype compute_info: ~flow.models.ComputeInfo
:keyword description:
:paramtype description: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword aks_advance_settings:
:paramtype aks_advance_settings: ~flow.models.AKSAdvanceSettings
:keyword aci_advance_settings:
:paramtype aci_advance_settings: ~flow.models.ACIAdvanceSettings
:keyword linked_training_pipeline_run_id:
:paramtype linked_training_pipeline_run_id: str
:keyword linked_experiment_name:
:paramtype linked_experiment_name: str
:keyword graph_nodes_run_id_mapping: This is a dictionary.
:paramtype graph_nodes_run_id_mapping: dict[str, str]
:keyword workflow:
:paramtype workflow: ~flow.models.PipelineGraph
:keyword inputs:
:paramtype inputs: list[~flow.models.InputOutputPortMetadata]
:keyword outputs:
:paramtype outputs: list[~flow.models.InputOutputPortMetadata]
:keyword example_request:
:paramtype example_request: ~flow.models.ExampleRequest
:keyword user_storage_connection_string:
:paramtype user_storage_connection_string: str
:keyword user_storage_endpoint_uri:
:paramtype user_storage_endpoint_uri: str
:keyword user_storage_workspace_sai_token:
:paramtype user_storage_workspace_sai_token: str
:keyword user_storage_container_name:
:paramtype user_storage_container_name: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword root_pipeline_run_id:
:paramtype root_pipeline_run_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(CreateRealTimeEndpointRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.compute_info = kwargs.get('compute_info', None)
self.description = kwargs.get('description', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
self.aks_advance_settings = kwargs.get('aks_advance_settings', None)
self.aci_advance_settings = kwargs.get('aci_advance_settings', None)
self.linked_training_pipeline_run_id = kwargs.get('linked_training_pipeline_run_id', None)
self.linked_experiment_name = kwargs.get('linked_experiment_name', None)
self.graph_nodes_run_id_mapping = kwargs.get('graph_nodes_run_id_mapping', None)
self.workflow = kwargs.get('workflow', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.example_request = kwargs.get('example_request', None)
self.user_storage_connection_string = kwargs.get('user_storage_connection_string', None)
self.user_storage_endpoint_uri = kwargs.get('user_storage_endpoint_uri', None)
self.user_storage_workspace_sai_token = kwargs.get('user_storage_workspace_sai_token', None)
self.user_storage_container_name = kwargs.get('user_storage_container_name', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.root_pipeline_run_id = kwargs.get('root_pipeline_run_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
class CreationContext(msrest.serialization.Model):
"""CreationContext.
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar creation_source:
:vartype creation_source: str
"""
_attribute_map = {
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'creation_source': {'key': 'creationSource', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword creation_source:
:paramtype creation_source: str
"""
super(CreationContext, self).__init__(**kwargs)
self.created_time = kwargs.get('created_time', None)
self.created_by = kwargs.get('created_by', None)
self.creation_source = kwargs.get('creation_source', None)
class Cron(msrest.serialization.Model):
"""Cron.
:ivar expression:
:vartype expression: str
:ivar end_time:
:vartype end_time: str
:ivar start_time:
:vartype start_time: str
:ivar time_zone:
:vartype time_zone: str
"""
_attribute_map = {
'expression': {'key': 'expression', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword expression:
:paramtype expression: str
:keyword end_time:
:paramtype end_time: str
:keyword start_time:
:paramtype start_time: str
:keyword time_zone:
:paramtype time_zone: str
"""
super(Cron, self).__init__(**kwargs)
self.expression = kwargs.get('expression', None)
self.end_time = kwargs.get('end_time', None)
self.start_time = kwargs.get('start_time', None)
self.time_zone = kwargs.get('time_zone', None)
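# Example (illustrative sketch): a cron trigger that fires every weekday at
# 09:00 UTC. The expression uses standard five-field cron syntax; start/end
# times are ISO 8601 strings and are optional.
#
#   weekday_cron = Cron(
#       expression="0 9 * * 1-5",
#       start_time="2024-01-01T00:00:00",
#       time_zone="UTC",
#   )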
class CustomConnectionConfig(msrest.serialization.Model):
"""CustomConnectionConfig.
:ivar config_value_type: Possible values include: "String", "Secret".
:vartype config_value_type: str or ~flow.models.ConfigValueType
:ivar value:
:vartype value: str
"""
_attribute_map = {
'config_value_type': {'key': 'configValueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword config_value_type: Possible values include: "String", "Secret".
:paramtype config_value_type: str or ~flow.models.ConfigValueType
:keyword value:
:paramtype value: str
"""
super(CustomConnectionConfig, self).__init__(**kwargs)
self.config_value_type = kwargs.get('config_value_type', None)
self.value = kwargs.get('value', None)
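# Example (illustrative sketch): one plain-text entry and one secret entry for
# a custom connection. "Secret" marks the value as sensitive; the literal
# values here are placeholders.
#
#   endpoint_cfg = CustomConnectionConfig(config_value_type="String", value="https://example.org/api")
#   api_key_cfg = CustomConnectionConfig(config_value_type="Secret", value="<api-key>")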
class CustomReference(msrest.serialization.Model):
"""CustomReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(CustomReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
class Data(msrest.serialization.Model):
"""Data.
:ivar data_location:
:vartype data_location: ~flow.models.ExecutionDataLocation
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DeliveryMechanism
:ivar environment_variable_name:
:vartype environment_variable_name: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar options: Dictionary of :code:`<string>`.
:vartype options: dict[str, str]
"""
_attribute_map = {
'data_location': {'key': 'dataLocation', 'type': 'ExecutionDataLocation'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'options': {'key': 'options', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_location:
:paramtype data_location: ~flow.models.ExecutionDataLocation
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DeliveryMechanism
:keyword environment_variable_name:
:paramtype environment_variable_name: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword options: Dictionary of :code:`<string>`.
:paramtype options: dict[str, str]
"""
super(Data, self).__init__(**kwargs)
self.data_location = kwargs.get('data_location', None)
self.mechanism = kwargs.get('mechanism', None)
self.environment_variable_name = kwargs.get('environment_variable_name', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.options = kwargs.get('options', None)
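# Example (illustrative sketch): a data delivery entry that mounts a location
# onto the compute. ExecutionDataLocation is another model in this module and
# is elided here; paths and names are placeholders.
#
#   data = Data(
#       data_location=ExecutionDataLocation(...),   # where the data lives
#       mechanism="Mount",                          # or "Direct"/"Download"/"Hdfs"
#       environment_variable_name="INPUT_DATA",
#       path_on_compute="/mnt/inputs/train",
#       overwrite=False,
#   )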
class DatabaseSink(msrest.serialization.Model):
"""DatabaseSink.
:ivar connection:
:vartype connection: str
:ivar table:
:vartype table: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'table': {'key': 'table', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword table:
:paramtype table: str
"""
super(DatabaseSink, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.table = kwargs.get('table', None)
class DatabaseSource(msrest.serialization.Model):
"""DatabaseSource.
:ivar connection:
:vartype connection: str
:ivar query:
:vartype query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'query': {'key': 'query', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword query:
:paramtype query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
"""
super(DatabaseSource, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.query = kwargs.get('query', None)
self.stored_procedure_name = kwargs.get('stored_procedure_name', None)
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
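# Example (illustrative sketch): two ways to read from a database source --
# an inline query, or a stored procedure with parameters. Connection names
# and SQL are placeholders; StoredProcedureParameter is defined elsewhere in
# this module.
#
#   query_source = DatabaseSource(connection="my-sql-connection", query="SELECT * FROM sales")
#   proc_source = DatabaseSource(
#       connection="my-sql-connection",
#       stored_procedure_name="usp_get_sales",
#       stored_procedure_parameters=[StoredProcedureParameter(...)],
#   )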
class DatabricksComputeInfo(msrest.serialization.Model):
"""DatabricksComputeInfo.
:ivar existing_cluster_id:
:vartype existing_cluster_id: str
"""
_attribute_map = {
'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword existing_cluster_id:
:paramtype existing_cluster_id: str
"""
super(DatabricksComputeInfo, self).__init__(**kwargs)
self.existing_cluster_id = kwargs.get('existing_cluster_id', None)
class DatabricksConfiguration(msrest.serialization.Model):
"""DatabricksConfiguration.
:ivar workers:
:vartype workers: int
:ivar minimum_worker_count:
:vartype minimum_worker_count: int
:ivar max_mum_worker_count: Maximum worker count (the generated name mirrors the service wire
 field "maxMumWorkerCount").
:vartype max_mum_worker_count: int
:ivar spark_version:
:vartype spark_version: str
:ivar node_type_id:
:vartype node_type_id: str
:ivar spark_conf: Dictionary of :code:`<string>`.
:vartype spark_conf: dict[str, str]
:ivar spark_env_vars: Dictionary of :code:`<string>`.
:vartype spark_env_vars: dict[str, str]
:ivar cluster_log_conf_dbfs_path:
:vartype cluster_log_conf_dbfs_path: str
:ivar dbfs_init_scripts:
:vartype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
:ivar instance_pool_id:
:vartype instance_pool_id: str
:ivar timeout_seconds:
:vartype timeout_seconds: int
:ivar notebook_task:
:vartype notebook_task: ~flow.models.NoteBookTaskDto
:ivar spark_python_task:
:vartype spark_python_task: ~flow.models.SparkPythonTaskDto
:ivar spark_jar_task:
:vartype spark_jar_task: ~flow.models.SparkJarTaskDto
:ivar spark_submit_task:
:vartype spark_submit_task: ~flow.models.SparkSubmitTaskDto
:ivar jar_libraries:
:vartype jar_libraries: list[str]
:ivar egg_libraries:
:vartype egg_libraries: list[str]
:ivar whl_libraries:
:vartype whl_libraries: list[str]
:ivar pypi_libraries:
:vartype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:ivar r_cran_libraries:
:vartype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:ivar maven_libraries:
:vartype maven_libraries: list[~flow.models.MavenLibraryDto]
:ivar libraries:
:vartype libraries: list[any]
:ivar linked_adb_workspace_metadata:
:vartype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
:ivar databrick_resource_id:
:vartype databrick_resource_id: str
:ivar auto_scale:
:vartype auto_scale: bool
"""
_attribute_map = {
'workers': {'key': 'workers', 'type': 'int'},
'minimum_worker_count': {'key': 'minimumWorkerCount', 'type': 'int'},
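# NOTE: 'maxMumWorkerCount' reproduces the service wire name verbatim
# (presumably a garbling of "maximum"); do not rename it, or serialization breaks.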
'max_mum_worker_count': {'key': 'maxMumWorkerCount', 'type': 'int'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
'node_type_id': {'key': 'nodeTypeId', 'type': 'str'},
'spark_conf': {'key': 'sparkConf', 'type': '{str}'},
'spark_env_vars': {'key': 'sparkEnvVars', 'type': '{str}'},
'cluster_log_conf_dbfs_path': {'key': 'clusterLogConfDbfsPath', 'type': 'str'},
'dbfs_init_scripts': {'key': 'dbfsInitScripts', 'type': '[InitScriptInfoDto]'},
'instance_pool_id': {'key': 'instancePoolId', 'type': 'str'},
'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
'notebook_task': {'key': 'notebookTask', 'type': 'NoteBookTaskDto'},
'spark_python_task': {'key': 'sparkPythonTask', 'type': 'SparkPythonTaskDto'},
'spark_jar_task': {'key': 'sparkJarTask', 'type': 'SparkJarTaskDto'},
'spark_submit_task': {'key': 'sparkSubmitTask', 'type': 'SparkSubmitTaskDto'},
'jar_libraries': {'key': 'jarLibraries', 'type': '[str]'},
'egg_libraries': {'key': 'eggLibraries', 'type': '[str]'},
'whl_libraries': {'key': 'whlLibraries', 'type': '[str]'},
'pypi_libraries': {'key': 'pypiLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
'r_cran_libraries': {'key': 'rCranLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
'maven_libraries': {'key': 'mavenLibraries', 'type': '[MavenLibraryDto]'},
'libraries': {'key': 'libraries', 'type': '[object]'},
'linked_adb_workspace_metadata': {'key': 'linkedADBWorkspaceMetadata', 'type': 'LinkedADBWorkspaceMetadata'},
'databrick_resource_id': {'key': 'databrickResourceId', 'type': 'str'},
'auto_scale': {'key': 'autoScale', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workers:
:paramtype workers: int
:keyword minimum_worker_count:
:paramtype minimum_worker_count: int
:keyword max_mum_worker_count: Maximum worker count (the generated name mirrors the service
 wire field "maxMumWorkerCount").
:paramtype max_mum_worker_count: int
:keyword spark_version:
:paramtype spark_version: str
:keyword node_type_id:
:paramtype node_type_id: str
:keyword spark_conf: Dictionary of :code:`<string>`.
:paramtype spark_conf: dict[str, str]
:keyword spark_env_vars: Dictionary of :code:`<string>`.
:paramtype spark_env_vars: dict[str, str]
:keyword cluster_log_conf_dbfs_path:
:paramtype cluster_log_conf_dbfs_path: str
:keyword dbfs_init_scripts:
:paramtype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
:keyword instance_pool_id:
:paramtype instance_pool_id: str
:keyword timeout_seconds:
:paramtype timeout_seconds: int
:keyword notebook_task:
:paramtype notebook_task: ~flow.models.NoteBookTaskDto
:keyword spark_python_task:
:paramtype spark_python_task: ~flow.models.SparkPythonTaskDto
:keyword spark_jar_task:
:paramtype spark_jar_task: ~flow.models.SparkJarTaskDto
:keyword spark_submit_task:
:paramtype spark_submit_task: ~flow.models.SparkSubmitTaskDto
:keyword jar_libraries:
:paramtype jar_libraries: list[str]
:keyword egg_libraries:
:paramtype egg_libraries: list[str]
:keyword whl_libraries:
:paramtype whl_libraries: list[str]
:keyword pypi_libraries:
:paramtype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:keyword r_cran_libraries:
:paramtype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:keyword maven_libraries:
:paramtype maven_libraries: list[~flow.models.MavenLibraryDto]
:keyword libraries:
:paramtype libraries: list[any]
:keyword linked_adb_workspace_metadata:
:paramtype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
:keyword databrick_resource_id:
:paramtype databrick_resource_id: str
:keyword auto_scale:
:paramtype auto_scale: bool
"""
super(DatabricksConfiguration, self).__init__(**kwargs)
self.workers = kwargs.get('workers', None)
self.minimum_worker_count = kwargs.get('minimum_worker_count', None)
self.max_mum_worker_count = kwargs.get('max_mum_worker_count', None)
self.spark_version = kwargs.get('spark_version', None)
self.node_type_id = kwargs.get('node_type_id', None)
self.spark_conf = kwargs.get('spark_conf', None)
self.spark_env_vars = kwargs.get('spark_env_vars', None)
self.cluster_log_conf_dbfs_path = kwargs.get('cluster_log_conf_dbfs_path', None)
self.dbfs_init_scripts = kwargs.get('dbfs_init_scripts', None)
self.instance_pool_id = kwargs.get('instance_pool_id', None)
self.timeout_seconds = kwargs.get('timeout_seconds', None)
self.notebook_task = kwargs.get('notebook_task', None)
self.spark_python_task = kwargs.get('spark_python_task', None)
self.spark_jar_task = kwargs.get('spark_jar_task', None)
self.spark_submit_task = kwargs.get('spark_submit_task', None)
self.jar_libraries = kwargs.get('jar_libraries', None)
self.egg_libraries = kwargs.get('egg_libraries', None)
self.whl_libraries = kwargs.get('whl_libraries', None)
self.pypi_libraries = kwargs.get('pypi_libraries', None)
self.r_cran_libraries = kwargs.get('r_cran_libraries', None)
self.maven_libraries = kwargs.get('maven_libraries', None)
self.libraries = kwargs.get('libraries', None)
self.linked_adb_workspace_metadata = kwargs.get('linked_adb_workspace_metadata', None)
self.databrick_resource_id = kwargs.get('databrick_resource_id', None)
self.auto_scale = kwargs.get('auto_scale', None)
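# Example (illustrative sketch): an autoscaling Databricks cluster config.
# 'max_mum_worker_count' is the generated name for the maximum worker count
# (see the note in _attribute_map above); all values are placeholders.
#
#   adb_config = DatabricksConfiguration(
#       auto_scale=True,
#       minimum_worker_count=2,
#       max_mum_worker_count=8,
#       spark_version="11.3.x-scala2.12",
#       node_type_id="Standard_DS3_v2",
#       timeout_seconds=3600,
#   )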
class DatacacheConfiguration(msrest.serialization.Model):
"""DatacacheConfiguration.
:ivar datacache_id:
:vartype datacache_id: str
:ivar datacache_store:
:vartype datacache_store: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar mode: The only acceptable values to pass in are None and "Mount". The default value is
None.
:vartype mode: str
:ivar replica:
:vartype replica: int
:ivar failure_fallback:
:vartype failure_fallback: bool
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'datacache_id': {'key': 'datacacheId', 'type': 'str'},
'datacache_store': {'key': 'datacacheStore', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'replica': {'key': 'replica', 'type': 'int'},
'failure_fallback': {'key': 'failureFallback', 'type': 'bool'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datacache_id:
:paramtype datacache_id: str
:keyword datacache_store:
:paramtype datacache_store: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword mode: The only acceptable values to pass in are None and "Mount". The default value
is None.
:paramtype mode: str
:keyword replica:
:paramtype replica: int
:keyword failure_fallback:
:paramtype failure_fallback: bool
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(DatacacheConfiguration, self).__init__(**kwargs)
self.datacache_id = kwargs.get('datacache_id', None)
self.datacache_store = kwargs.get('datacache_store', None)
self.dataset_id = kwargs.get('dataset_id', None)
self.mode = kwargs.get('mode', None)
self.replica = kwargs.get('replica', None)
self.failure_fallback = kwargs.get('failure_fallback', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
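# Example (illustrative sketch): a datacache request that mounts a cached
# dataset with two replicas. Note the constraint documented above: 'mode'
# accepts only None or "Mount". Ids and names are placeholders.
#
#   cache = DatacacheConfiguration(
#       datacache_store="my-datacache-store",
#       dataset_id="<dataset-guid>",
#       mode="Mount",
#       replica=2,
#       failure_fallback=True,
#   )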
class DataInfo(msrest.serialization.Model):
"""DataInfo.
:ivar feed_name:
:vartype feed_name: str
:ivar id:
:vartype id: str
:ivar data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
"GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
:vartype data_source_type: str or ~flow.models.DataSourceType
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar modified_date:
:vartype modified_date: ~datetime.datetime
:ivar registered_by:
:vartype registered_by: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:vartype data_reference_type: str or ~flow.models.DataReferenceType
:ivar dataset_type:
:vartype dataset_type: str
:ivar saved_dataset_id:
:vartype saved_dataset_id: str
:ivar dataset_version_id:
:vartype dataset_version_id: str
:ivar is_visible:
:vartype is_visible: bool
:ivar is_registered:
:vartype is_registered: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, any]
:ivar connection_string:
:vartype connection_string: str
:ivar container_name:
:vartype container_name: str
:ivar data_storage_endpoint_uri:
:vartype data_storage_endpoint_uri: str
:ivar workspace_sai_token:
:vartype workspace_sai_token: str
:ivar aml_dataset_data_flow:
:vartype aml_dataset_data_flow: str
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar arm_id:
:vartype arm_id: str
:ivar asset_id:
:vartype asset_id: str
:ivar asset_uri:
:vartype asset_uri: str
:ivar asset_type:
:vartype asset_type: str
:ivar is_data_v2:
:vartype is_data_v2: bool
:ivar asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
:vartype asset_scope_type: str or ~flow.models.AssetScopeTypes
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar module_node_id:
:vartype module_node_id: str
:ivar output_port_name:
:vartype output_port_name: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
'dataset_version_id': {'key': 'datasetVersionId', 'type': 'str'},
'is_visible': {'key': 'isVisible', 'type': 'bool'},
'is_registered': {'key': 'isRegistered', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{object}'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'container_name': {'key': 'containerName', 'type': 'str'},
'data_storage_endpoint_uri': {'key': 'dataStorageEndpointUri', 'type': 'str'},
'workspace_sai_token': {'key': 'workspaceSaiToken', 'type': 'str'},
'aml_dataset_data_flow': {'key': 'amlDatasetDataFlow', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'arm_id': {'key': 'armId', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'asset_type': {'key': 'assetType', 'type': 'str'},
'is_data_v2': {'key': 'isDataV2', 'type': 'bool'},
'asset_scope_type': {'key': 'assetScopeType', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'output_port_name': {'key': 'outputPortName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword id:
:paramtype id: str
:keyword data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
"GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
:paramtype data_source_type: str or ~flow.models.DataSourceType
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword modified_date:
:paramtype modified_date: ~datetime.datetime
:keyword registered_by:
:paramtype registered_by: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype data_reference_type: str or ~flow.models.DataReferenceType
:keyword dataset_type:
:paramtype dataset_type: str
:keyword saved_dataset_id:
:paramtype saved_dataset_id: str
:keyword dataset_version_id:
:paramtype dataset_version_id: str
:keyword is_visible:
:paramtype is_visible: bool
:keyword is_registered:
:paramtype is_registered: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, any]
:keyword connection_string:
:paramtype connection_string: str
:keyword container_name:
:paramtype container_name: str
:keyword data_storage_endpoint_uri:
:paramtype data_storage_endpoint_uri: str
:keyword workspace_sai_token:
:paramtype workspace_sai_token: str
:keyword aml_dataset_data_flow:
:paramtype aml_dataset_data_flow: str
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword arm_id:
:paramtype arm_id: str
:keyword asset_id:
:paramtype asset_id: str
:keyword asset_uri:
:paramtype asset_uri: str
:keyword asset_type:
:paramtype asset_type: str
:keyword is_data_v2:
:paramtype is_data_v2: bool
:keyword asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
:paramtype asset_scope_type: str or ~flow.models.AssetScopeTypes
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword module_node_id:
:paramtype module_node_id: str
:keyword output_port_name:
:paramtype output_port_name: str
"""
super(DataInfo, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.id = kwargs.get('id', None)
self.data_source_type = kwargs.get('data_source_type', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.created_date = kwargs.get('created_date', None)
self.modified_date = kwargs.get('modified_date', None)
self.registered_by = kwargs.get('registered_by', None)
self.tags = kwargs.get('tags', None)
self.created_by_studio = kwargs.get('created_by_studio', None)
self.data_reference_type = kwargs.get('data_reference_type', None)
self.dataset_type = kwargs.get('dataset_type', None)
self.saved_dataset_id = kwargs.get('saved_dataset_id', None)
self.dataset_version_id = kwargs.get('dataset_version_id', None)
self.is_visible = kwargs.get('is_visible', None)
self.is_registered = kwargs.get('is_registered', None)
self.properties = kwargs.get('properties', None)
self.connection_string = kwargs.get('connection_string', None)
self.container_name = kwargs.get('container_name', None)
self.data_storage_endpoint_uri = kwargs.get('data_storage_endpoint_uri', None)
self.workspace_sai_token = kwargs.get('workspace_sai_token', None)
self.aml_dataset_data_flow = kwargs.get('aml_dataset_data_flow', None)
self.system_data = kwargs.get('system_data', None)
self.arm_id = kwargs.get('arm_id', None)
self.asset_id = kwargs.get('asset_id', None)
self.asset_uri = kwargs.get('asset_uri', None)
self.asset_type = kwargs.get('asset_type', None)
self.is_data_v2 = kwargs.get('is_data_v2', None)
self.asset_scope_type = kwargs.get('asset_scope_type', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.module_node_id = kwargs.get('module_node_id', None)
self.output_port_name = kwargs.get('output_port_name', None)
class DataLocation(msrest.serialization.Model):
"""DataLocation.
:ivar storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:vartype storage_type: str or ~flow.models.DataLocationStorageType
:ivar storage_id:
:vartype storage_id: str
:ivar uri:
:vartype uri: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference:
:vartype data_reference: ~flow.models.DataReference
:ivar aml_dataset:
:vartype aml_dataset: ~flow.models.AmlDataset
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AssetDefinition
"""
_attribute_map = {
'storage_type': {'key': 'storageType', 'type': 'str'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference': {'key': 'dataReference', 'type': 'DataReference'},
'aml_dataset': {'key': 'amlDataset', 'type': 'AmlDataset'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:paramtype storage_type: str or ~flow.models.DataLocationStorageType
:keyword storage_id:
:paramtype storage_id: str
:keyword uri:
:paramtype uri: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference:
:paramtype data_reference: ~flow.models.DataReference
:keyword aml_dataset:
:paramtype aml_dataset: ~flow.models.AmlDataset
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AssetDefinition
"""
super(DataLocation, self).__init__(**kwargs)
self.storage_type = kwargs.get('storage_type', None)
self.storage_id = kwargs.get('storage_id', None)
self.uri = kwargs.get('uri', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_reference = kwargs.get('data_reference', None)
self.aml_dataset = kwargs.get('aml_dataset', None)
self.asset_definition = kwargs.get('asset_definition', None)
class DataPath(msrest.serialization.Model):
"""DataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar sql_data_path:
:vartype sql_data_path: ~flow.models.SqlDataPath
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword sql_data_path:
:paramtype sql_data_path: ~flow.models.SqlDataPath
"""
super(DataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.sql_data_path = kwargs.get('sql_data_path', None)
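# Example (illustrative sketch): addressing a folder inside a registered
# datastore. The datastore name and relative path are placeholders.
#
#   path = DataPath(data_store_name="workspaceblobstore", relative_path="datasets/iris/v1")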
class DataPathParameter(msrest.serialization.Model):
"""DataPathParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.LegacyDataPath
:ivar is_optional:
:vartype is_optional: bool
:ivar data_type_id:
:vartype data_type_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'LegacyDataPath'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.LegacyDataPath
:keyword is_optional:
:paramtype is_optional: bool
:keyword data_type_id:
:paramtype data_type_id: str
"""
super(DataPathParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
self.is_optional = kwargs.get('is_optional', None)
self.data_type_id = kwargs.get('data_type_id', None)
class DataPortDto(msrest.serialization.Model):
"""DataPortDto.
:ivar data_port_type: Possible values include: "Input", "Output".
:vartype data_port_type: str or ~flow.models.DataPortType
:ivar data_port_name:
:vartype data_port_name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
"ReadWrite".
:vartype data_store_intellectual_property_access_mode: str or
~flow.models.IntellectualPropertyAccessMode
:ivar data_store_intellectual_property_publisher:
:vartype data_store_intellectual_property_publisher: str
"""
_attribute_map = {
'data_port_type': {'key': 'dataPortType', 'type': 'str'},
'data_port_name': {'key': 'dataPortName', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_intellectual_property_access_mode': {'key': 'dataStoreIntellectualPropertyAccessMode', 'type': 'str'},
'data_store_intellectual_property_publisher': {'key': 'dataStoreIntellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_port_type: Possible values include: "Input", "Output".
:paramtype data_port_type: str or ~flow.models.DataPortType
:keyword data_port_name:
:paramtype data_port_name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
"ReadWrite".
:paramtype data_store_intellectual_property_access_mode: str or
~flow.models.IntellectualPropertyAccessMode
:keyword data_store_intellectual_property_publisher:
:paramtype data_store_intellectual_property_publisher: str
"""
super(DataPortDto, self).__init__(**kwargs)
self.data_port_type = kwargs.get('data_port_type', None)
self.data_port_name = kwargs.get('data_port_name', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_intellectual_property_access_mode = kwargs.get('data_store_intellectual_property_access_mode', None)
self.data_store_intellectual_property_publisher = kwargs.get('data_store_intellectual_property_publisher', None)
class DataReference(msrest.serialization.Model):
"""DataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase",
"Custom", "Hdfs".
:vartype type: str or ~flow.models.DataReferenceType
:ivar azure_blob_reference:
:vartype azure_blob_reference: ~flow.models.AzureBlobReference
:ivar azure_data_lake_reference:
:vartype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
:ivar azure_files_reference:
:vartype azure_files_reference: ~flow.models.AzureFilesReference
:ivar azure_sql_database_reference:
:vartype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
:ivar azure_postgres_database_reference:
:vartype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
:ivar azure_data_lake_gen2_reference:
:vartype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
:ivar dbfs_reference:
:vartype dbfs_reference: ~flow.models.DBFSReference
:ivar azure_my_sql_database_reference:
:vartype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
:ivar custom_reference:
:vartype custom_reference: ~flow.models.CustomReference
:ivar hdfs_reference:
:vartype hdfs_reference: ~flow.models.HdfsReference
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AzureBlobReference'},
'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AzureDataLakeReference'},
'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AzureFilesReference'},
'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AzureDatabaseReference'},
'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AzureDatabaseReference'},
'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AzureDataLakeGen2Reference'},
'dbfs_reference': {'key': 'dbfsReference', 'type': 'DBFSReference'},
'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AzureDatabaseReference'},
'custom_reference': {'key': 'customReference', 'type': 'CustomReference'},
'hdfs_reference': {'key': 'hdfsReference', 'type': 'HdfsReference'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase",
"Custom", "Hdfs".
:paramtype type: str or ~flow.models.DataReferenceType
:keyword azure_blob_reference:
:paramtype azure_blob_reference: ~flow.models.AzureBlobReference
:keyword azure_data_lake_reference:
:paramtype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
:keyword azure_files_reference:
:paramtype azure_files_reference: ~flow.models.AzureFilesReference
:keyword azure_sql_database_reference:
:paramtype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
:keyword azure_postgres_database_reference:
:paramtype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
:keyword azure_data_lake_gen2_reference:
:paramtype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
:keyword dbfs_reference:
:paramtype dbfs_reference: ~flow.models.DBFSReference
:keyword azure_my_sql_database_reference:
:paramtype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
:keyword custom_reference:
:paramtype custom_reference: ~flow.models.CustomReference
:keyword hdfs_reference:
:paramtype hdfs_reference: ~flow.models.HdfsReference
"""
super(DataReference, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.azure_blob_reference = kwargs.get('azure_blob_reference', None)
self.azure_data_lake_reference = kwargs.get('azure_data_lake_reference', None)
self.azure_files_reference = kwargs.get('azure_files_reference', None)
self.azure_sql_database_reference = kwargs.get('azure_sql_database_reference', None)
self.azure_postgres_database_reference = kwargs.get('azure_postgres_database_reference', None)
self.azure_data_lake_gen2_reference = kwargs.get('azure_data_lake_gen2_reference', None)
self.dbfs_reference = kwargs.get('dbfs_reference', None)
self.azure_my_sql_database_reference = kwargs.get('azure_my_sql_database_reference', None)
self.custom_reference = kwargs.get('custom_reference', None)
self.hdfs_reference = kwargs.get('hdfs_reference', None)
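# Example (illustrative sketch): DataReference behaves like a tagged union --
# set 'type' and populate the matching reference field, leaving the rest unset.
# AzureBlobReference is another model in this module and is elided here.
#
#   ref = DataReference(
#       type="AzureBlob",
#       azure_blob_reference=AzureBlobReference(...),
#   )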
class DataReferenceConfiguration(msrest.serialization.Model):
"""DataReferenceConfiguration.
:ivar data_store_name:
:vartype data_store_name: str
:ivar mode: Possible values include: "Mount", "Download", "Upload".
:vartype mode: str or ~flow.models.DataStoreMode
:ivar path_on_data_store:
:vartype path_on_data_store: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'path_on_data_store': {'key': 'pathOnDataStore', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword mode: Possible values include: "Mount", "Download", "Upload".
:paramtype mode: str or ~flow.models.DataStoreMode
:keyword path_on_data_store:
:paramtype path_on_data_store: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
"""
super(DataReferenceConfiguration, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.mode = kwargs.get('mode', None)
self.path_on_data_store = kwargs.get('path_on_data_store', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
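# Example (illustrative sketch): mounting a datastore path onto the compute.
# "Mount" avoids copying data, while "Download"/"Upload" materialize it;
# names and paths are placeholders.
#
#   ref_config = DataReferenceConfiguration(
#       data_store_name="workspaceblobstore",
#       mode="Mount",
#       path_on_data_store="datasets/iris/v1",
#       path_on_compute="/mnt/iris",
#       overwrite=False,
#   )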
class DataSetDefinition(msrest.serialization.Model):
"""DataSetDefinition.
:ivar data_type_short_name:
:vartype data_type_short_name: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar value:
:vartype value: ~flow.models.DataSetDefinitionValue
"""
_attribute_map = {
'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'value': {'key': 'value', 'type': 'DataSetDefinitionValue'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_type_short_name:
:paramtype data_type_short_name: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword value:
:paramtype value: ~flow.models.DataSetDefinitionValue
"""
super(DataSetDefinition, self).__init__(**kwargs)
self.data_type_short_name = kwargs.get('data_type_short_name', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.value = kwargs.get('value', None)
class DataSetDefinitionValue(msrest.serialization.Model):
"""DataSetDefinitionValue.
:ivar literal_value:
:vartype literal_value: ~flow.models.DataPath
:ivar data_set_reference:
:vartype data_set_reference: ~flow.models.RegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AssetDefinition
"""
_attribute_map = {
'literal_value': {'key': 'literalValue', 'type': 'DataPath'},
'data_set_reference': {'key': 'dataSetReference', 'type': 'RegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword literal_value:
:paramtype literal_value: ~flow.models.DataPath
:keyword data_set_reference:
:paramtype data_set_reference: ~flow.models.RegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AssetDefinition
"""
super(DataSetDefinitionValue, self).__init__(**kwargs)
self.literal_value = kwargs.get('literal_value', None)
self.data_set_reference = kwargs.get('data_set_reference', None)
self.saved_data_set_reference = kwargs.get('saved_data_set_reference', None)
self.asset_definition = kwargs.get('asset_definition', None)
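# Example (illustrative sketch): the four fields are alternative ways to point
# at the same data; populate exactly one. Here a literal datastore path is used.
#
#   value = DataSetDefinitionValue(
#       literal_value=DataPath(data_store_name="workspaceblobstore", relative_path="datasets/iris/v1"),
#   )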
class DatasetIdentifier(msrest.serialization.Model):
"""DatasetIdentifier.
:ivar saved_id:
:vartype saved_id: str
:ivar registered_id:
:vartype registered_id: str
:ivar registered_version:
:vartype registered_version: str
"""
_attribute_map = {
'saved_id': {'key': 'savedId', 'type': 'str'},
'registered_id': {'key': 'registeredId', 'type': 'str'},
'registered_version': {'key': 'registeredVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword saved_id:
:paramtype saved_id: str
:keyword registered_id:
:paramtype registered_id: str
:keyword registered_version:
:paramtype registered_version: str
"""
super(DatasetIdentifier, self).__init__(**kwargs)
self.saved_id = kwargs.get('saved_id', None)
self.registered_id = kwargs.get('registered_id', None)
self.registered_version = kwargs.get('registered_version', None)
class DatasetInputDetails(msrest.serialization.Model):
"""DatasetInputDetails.
:ivar input_name:
:vartype input_name: str
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DatasetDeliveryMechanism
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'input_name': {'key': 'inputName', 'type': 'str'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword input_name:
:paramtype input_name: str
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DatasetDeliveryMechanism
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(DatasetInputDetails, self).__init__(**kwargs)
self.input_name = kwargs.get('input_name', None)
self.mechanism = kwargs.get('mechanism', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
class DatasetLineage(msrest.serialization.Model):
"""DatasetLineage.
:ivar identifier:
:vartype identifier: ~flow.models.DatasetIdentifier
:ivar consumption_type: Possible values include: "RunInput", "Reference".
:vartype consumption_type: str or ~flow.models.DatasetConsumptionType
:ivar input_details:
:vartype input_details: ~flow.models.DatasetInputDetails
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
'consumption_type': {'key': 'consumptionType', 'type': 'str'},
'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier:
:paramtype identifier: ~flow.models.DatasetIdentifier
:keyword consumption_type: Possible values include: "RunInput", "Reference".
:paramtype consumption_type: str or ~flow.models.DatasetConsumptionType
:keyword input_details:
:paramtype input_details: ~flow.models.DatasetInputDetails
"""
super(DatasetLineage, self).__init__(**kwargs)
self.identifier = kwargs.get('identifier', None)
self.consumption_type = kwargs.get('consumption_type', None)
self.input_details = kwargs.get('input_details', None)
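# Illustrative sketch (not generated): composing a DatasetLineage from the
# models above. Enum-typed fields accept plain strings matching the
# "Possible values include" lists; the literal values shown are examples.
#
#     lineage = DatasetLineage(
#         identifier=DatasetIdentifier(registered_id='my-dataset', registered_version='1'),
#         consumption_type='RunInput',
#         input_details=DatasetInputDetails(input_name='training_data', mechanism='Mount'),
#     )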
class DatasetOutput(msrest.serialization.Model):
"""DatasetOutput.
:ivar dataset_type: Possible values include: "File", "Tabular".
:vartype dataset_type: str or ~flow.models.DatasetType
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.DatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.DatasetOutputOptions
"""
_attribute_map = {
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_type: Possible values include: "File", "Tabular".
:paramtype dataset_type: str or ~flow.models.DatasetType
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.DatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
"""
super(DatasetOutput, self).__init__(**kwargs)
self.dataset_type = kwargs.get('dataset_type', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
class DatasetOutputDetails(msrest.serialization.Model):
"""DatasetOutputDetails.
:ivar output_name:
:vartype output_name: str
"""
_attribute_map = {
'output_name': {'key': 'outputName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_name:
:paramtype output_name: str
"""
super(DatasetOutputDetails, self).__init__(**kwargs)
self.output_name = kwargs.get('output_name', None)
class DatasetOutputOptions(msrest.serialization.Model):
"""DatasetOutputOptions.
:ivar source_globs:
:vartype source_globs: ~flow.models.GlobsOptions
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_datastore_parameter_assignment:
:vartype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'source_globs': {'key': 'sourceGlobs', 'type': 'GlobsOptions'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
# 'PathOnDatastoreParameterAssignment' is PascalCase on the wire, unlike the
# other keys in this map; this mirrors the generated service contract.
'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'ParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_globs:
:paramtype source_globs: ~flow.models.GlobsOptions
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_datastore_parameter_assignment:
:paramtype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
"""
super(DatasetOutputOptions, self).__init__(**kwargs)
self.source_globs = kwargs.get('source_globs', None)
self.path_on_datastore = kwargs.get('path_on_datastore', None)
self.path_on_datastore_parameter_assignment = kwargs.get('path_on_datastore_parameter_assignment', None)
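# Illustrative sketch (not generated): directing a dataset output to a fixed
# datastore path. The path shown is a placeholder.
#
#     output_options = DatasetOutputOptions(
#         path_on_datastore='outputs/predictions',
#     )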
class DataSetPathParameter(msrest.serialization.Model):
"""DataSetPathParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.DataSetDefinitionValue
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'DataSetDefinitionValue'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.DataSetDefinitionValue
:keyword is_optional:
:paramtype is_optional: bool
"""
super(DataSetPathParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
self.is_optional = kwargs.get('is_optional', None)
class DatasetRegistration(msrest.serialization.Model):
"""DatasetRegistration.
:ivar name:
:vartype name: str
:ivar create_new_version:
:vartype create_new_version: bool
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword create_new_version:
:paramtype create_new_version: bool
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(DatasetRegistration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.create_new_version = kwargs.get('create_new_version', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
class DatasetRegistrationOptions(msrest.serialization.Model):
"""DatasetRegistrationOptions.
:ivar additional_transformation:
:vartype additional_transformation: str
"""
_attribute_map = {
'additional_transformation': {'key': 'additionalTransformation', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword additional_transformation:
:paramtype additional_transformation: str
"""
super(DatasetRegistrationOptions, self).__init__(**kwargs)
self.additional_transformation = kwargs.get('additional_transformation', None)
class DataSettings(msrest.serialization.Model):
"""DataSettings.
:ivar target_column_name:
:vartype target_column_name: str
:ivar weight_column_name:
:vartype weight_column_name: str
:ivar positive_label:
:vartype positive_label: str
:ivar validation_data:
:vartype validation_data: ~flow.models.ValidationDataSettings
:ivar test_data:
:vartype test_data: ~flow.models.TestDataSettings
"""
_attribute_map = {
'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
'positive_label': {'key': 'positiveLabel', 'type': 'str'},
'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'},
'test_data': {'key': 'testData', 'type': 'TestDataSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword target_column_name:
:paramtype target_column_name: str
:keyword weight_column_name:
:paramtype weight_column_name: str
:keyword positive_label:
:paramtype positive_label: str
:keyword validation_data:
:paramtype validation_data: ~flow.models.ValidationDataSettings
:keyword test_data:
:paramtype test_data: ~flow.models.TestDataSettings
"""
super(DataSettings, self).__init__(**kwargs)
self.target_column_name = kwargs.get('target_column_name', None)
self.weight_column_name = kwargs.get('weight_column_name', None)
self.positive_label = kwargs.get('positive_label', None)
self.validation_data = kwargs.get('validation_data', None)
self.test_data = kwargs.get('test_data', None)
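# Illustrative sketch (not generated): minimal DataSettings for a binary
# classification task; the column names and positive label are placeholders.
#
#     data_settings = DataSettings(
#         target_column_name='label',
#         weight_column_name='sample_weight',
#         positive_label='1',
#     )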
class DatastoreSetting(msrest.serialization.Model):
"""DatastoreSetting.
:ivar data_store_name:
:vartype data_store_name: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
"""
super(DatastoreSetting, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
class DataTransferCloudConfiguration(msrest.serialization.Model):
"""DataTransferCloudConfiguration.
:ivar allow_overwrite:
:vartype allow_overwrite: bool
"""
_attribute_map = {
'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
"""
super(DataTransferCloudConfiguration, self).__init__(**kwargs)
self.allow_overwrite = kwargs.get('allow_overwrite', None)
class DataTransferSink(msrest.serialization.Model):
"""DataTransferSink.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.DataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.FileSystem
:ivar database_sink:
:vartype database_sink: ~flow.models.DatabaseSink
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
'database_sink': {'key': 'databaseSink', 'type': 'DatabaseSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.DataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.FileSystem
:keyword database_sink:
:paramtype database_sink: ~flow.models.DatabaseSink
"""
super(DataTransferSink, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_sink = kwargs.get('database_sink', None)
class DataTransferSource(msrest.serialization.Model):
"""DataTransferSource.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.DataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.FileSystem
:ivar database_source:
:vartype database_source: ~flow.models.DatabaseSource
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
'database_source': {'key': 'databaseSource', 'type': 'DatabaseSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.DataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.FileSystem
:keyword database_source:
:paramtype database_source: ~flow.models.DatabaseSource
"""
super(DataTransferSource, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.file_system = kwargs.get('file_system', None)
self.database_source = kwargs.get('database_source', None)
class DataTransferV2CloudSetting(msrest.serialization.Model):
"""DataTransferV2CloudSetting.
:ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:vartype task_type: str or ~flow.models.DataTransferTaskType
:ivar compute_name:
:vartype compute_name: str
:ivar copy_data_task:
:vartype copy_data_task: ~flow.models.CopyDataTask
:ivar import_data_task:
:vartype import_data_task: ~flow.models.ImportDataTask
:ivar export_data_task:
:vartype export_data_task: ~flow.models.ExportDataTask
:ivar data_transfer_sources: This is a dictionary.
:vartype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
:ivar data_transfer_sinks: This is a dictionary.
:vartype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.DataCopyMode
"""
# The mixed camelCase/PascalCase wire keys below (e.g. 'taskType' vs.
# 'ComputeName') mirror the service contract emitted by AutoRest; do not
# normalize them.
_attribute_map = {
'task_type': {'key': 'taskType', 'type': 'str'},
'compute_name': {'key': 'ComputeName', 'type': 'str'},
'copy_data_task': {'key': 'CopyDataTask', 'type': 'CopyDataTask'},
'import_data_task': {'key': 'ImportDataTask', 'type': 'ImportDataTask'},
'export_data_task': {'key': 'ExportDataTask', 'type': 'ExportDataTask'},
'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{DataTransferSource}'},
'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{DataTransferSink}'},
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:paramtype task_type: str or ~flow.models.DataTransferTaskType
:keyword compute_name:
:paramtype compute_name: str
:keyword copy_data_task:
:paramtype copy_data_task: ~flow.models.CopyDataTask
:keyword import_data_task:
:paramtype import_data_task: ~flow.models.ImportDataTask
:keyword export_data_task:
:paramtype export_data_task: ~flow.models.ExportDataTask
:keyword data_transfer_sources: This is a dictionary.
:paramtype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
:keyword data_transfer_sinks: This is a dictionary.
:paramtype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode
"""
super(DataTransferV2CloudSetting, self).__init__(**kwargs)
self.task_type = kwargs.get('task_type', None)
self.compute_name = kwargs.get('compute_name', None)
self.copy_data_task = kwargs.get('copy_data_task', None)
self.import_data_task = kwargs.get('import_data_task', None)
self.export_data_task = kwargs.get('export_data_task', None)
self.data_transfer_sources = kwargs.get('data_transfer_sources', None)
self.data_transfer_sinks = kwargs.get('data_transfer_sinks', None)
self.data_copy_mode = kwargs.get('data_copy_mode', None)
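# Illustrative sketch (not generated): a copy-data configuration. The compute
# name is a placeholder, and the enum-typed fields take the string values
# listed in the docstring above.
#
#     setting = DataTransferV2CloudSetting(
#         task_type='CopyData',
#         compute_name='my-compute',
#         data_copy_mode='MergeWithOverwrite',
#     )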
class DataTypeCreationInfo(msrest.serialization.Model):
"""DataTypeCreationInfo.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar is_directory:
:vartype is_directory: bool
:ivar file_extension:
:vartype file_extension: str
:ivar parent_data_type_ids:
:vartype parent_data_type_ids: list[str]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_directory': {'key': 'isDirectory', 'type': 'bool'},
'file_extension': {'key': 'fileExtension', 'type': 'str'},
'parent_data_type_ids': {'key': 'parentDataTypeIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword is_directory:
:paramtype is_directory: bool
:keyword file_extension:
:paramtype file_extension: str
:keyword parent_data_type_ids:
:paramtype parent_data_type_ids: list[str]
"""
super(DataTypeCreationInfo, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.is_directory = kwargs.get('is_directory', None)
self.file_extension = kwargs.get('file_extension', None)
self.parent_data_type_ids = kwargs.get('parent_data_type_ids', None)
class DBFSReference(msrest.serialization.Model):
"""DBFSReference.
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(DBFSReference, self).__init__(**kwargs)
self.relative_path = kwargs.get('relative_path', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
class DbfsStorageInfoDto(msrest.serialization.Model):
"""DbfsStorageInfoDto.
:ivar destination:
:vartype destination: str
"""
_attribute_map = {
'destination': {'key': 'destination', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword destination:
:paramtype destination: str
"""
super(DbfsStorageInfoDto, self).__init__(**kwargs)
self.destination = kwargs.get('destination', None)
class DebugInfoResponse(msrest.serialization.Model):
"""Internal debugging information not intended for external clients.
:ivar type: The type.
:vartype type: str
:ivar message: The message.
:vartype message: str
:ivar stack_trace: The stack trace.
:vartype stack_trace: str
:ivar inner_exception: Internal debugging information not intended for external clients.
:vartype inner_exception: ~flow.models.DebugInfoResponse
:ivar data: This is a dictionary.
:vartype data: dict[str, any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'stack_trace': {'key': 'stackTrace', 'type': 'str'},
'inner_exception': {'key': 'innerException', 'type': 'DebugInfoResponse'},
'data': {'key': 'data', 'type': '{object}'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: The type.
:paramtype type: str
:keyword message: The message.
:paramtype message: str
:keyword stack_trace: The stack trace.
:paramtype stack_trace: str
:keyword inner_exception: Internal debugging information not intended for external clients.
:paramtype inner_exception: ~flow.models.DebugInfoResponse
:keyword data: This is a dictionary.
:paramtype data: dict[str, any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
"""
super(DebugInfoResponse, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.message = kwargs.get('message', None)
self.stack_trace = kwargs.get('stack_trace', None)
self.inner_exception = kwargs.get('inner_exception', None)
self.data = kwargs.get('data', None)
self.error_response = kwargs.get('error_response', None)
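# Illustrative sketch (hand-written helper, not part of the generated client):
# inner_exception is self-referential, so a nested failure can be flattened by
# walking the chain until it terminates.
#
#     def iter_debug_info(info):
#         while info is not None:
#             yield info
#             info = info.inner_exception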
class DeployFlowRequest(msrest.serialization.Model):
"""DeployFlowRequest.
:ivar source_resource_id:
:vartype source_resource_id: str
:ivar source_flow_run_id:
:vartype source_flow_run_id: str
:ivar source_flow_id:
:vartype source_flow_id: str
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar output_names_included_in_endpoint_response:
:vartype output_names_included_in_endpoint_response: list[str]
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar endpoint_description:
:vartype endpoint_description: str
:ivar auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
:vartype auth_mode: str or ~flow.models.EndpointAuthMode
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar endpoint_tags: This is a dictionary.
:vartype endpoint_tags: dict[str, str]
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar use_workspace_connection:
:vartype use_workspace_connection: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar environment:
:vartype environment: str
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar deployment_tags: This is a dictionary.
:vartype deployment_tags: dict[str, str]
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar enable_model_data_collector:
:vartype enable_model_data_collector: bool
:ivar skip_update_traffic_to_full:
:vartype skip_update_traffic_to_full: bool
:ivar enable_streaming_response:
:vartype enable_streaming_response: bool
:ivar use_flow_snapshot_to_deploy:
:vartype use_flow_snapshot_to_deploy: bool
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar auto_grant_connection_permission:
:vartype auto_grant_connection_permission: bool
"""
_attribute_map = {
'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
'source_flow_run_id': {'key': 'sourceFlowRunId', 'type': 'str'},
'source_flow_id': {'key': 'sourceFlowId', 'type': 'str'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'output_names_included_in_endpoint_response': {'key': 'outputNamesIncludedInEndpointResponse', 'type': '[str]'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'endpoint_description': {'key': 'endpointDescription', 'type': 'str'},
'auth_mode': {'key': 'authMode', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'endpoint_tags': {'key': 'endpointTags', 'type': '{str}'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'deployment_tags': {'key': 'deploymentTags', 'type': '{str}'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'enable_model_data_collector': {'key': 'enableModelDataCollector', 'type': 'bool'},
'skip_update_traffic_to_full': {'key': 'skipUpdateTrafficToFull', 'type': 'bool'},
'enable_streaming_response': {'key': 'enableStreamingResponse', 'type': 'bool'},
'use_flow_snapshot_to_deploy': {'key': 'useFlowSnapshotToDeploy', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'auto_grant_connection_permission': {'key': 'autoGrantConnectionPermission', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_resource_id:
:paramtype source_resource_id: str
:keyword source_flow_run_id:
:paramtype source_flow_run_id: str
:keyword source_flow_id:
:paramtype source_flow_id: str
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword output_names_included_in_endpoint_response:
:paramtype output_names_included_in_endpoint_response: list[str]
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword endpoint_description:
:paramtype endpoint_description: str
:keyword auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
:paramtype auth_mode: str or ~flow.models.EndpointAuthMode
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword endpoint_tags: This is a dictionary.
:paramtype endpoint_tags: dict[str, str]
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword use_workspace_connection:
:paramtype use_workspace_connection: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword environment:
:paramtype environment: str
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword deployment_tags: This is a dictionary.
:paramtype deployment_tags: dict[str, str]
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword enable_model_data_collector:
:paramtype enable_model_data_collector: bool
:keyword skip_update_traffic_to_full:
:paramtype skip_update_traffic_to_full: bool
:keyword enable_streaming_response:
:paramtype enable_streaming_response: bool
:keyword use_flow_snapshot_to_deploy:
:paramtype use_flow_snapshot_to_deploy: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword auto_grant_connection_permission:
:paramtype auto_grant_connection_permission: bool
"""
super(DeployFlowRequest, self).__init__(**kwargs)
self.source_resource_id = kwargs.get('source_resource_id', None)
self.source_flow_run_id = kwargs.get('source_flow_run_id', None)
self.source_flow_id = kwargs.get('source_flow_id', None)
self.flow = kwargs.get('flow', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
self.output_names_included_in_endpoint_response = kwargs.get('output_names_included_in_endpoint_response', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.endpoint_description = kwargs.get('endpoint_description', None)
self.auth_mode = kwargs.get('auth_mode', None)
self.identity = kwargs.get('identity', None)
self.endpoint_tags = kwargs.get('endpoint_tags', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.use_workspace_connection = kwargs.get('use_workspace_connection', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.environment = kwargs.get('environment', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.deployment_tags = kwargs.get('deployment_tags', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.enable_model_data_collector = kwargs.get('enable_model_data_collector', None)
self.skip_update_traffic_to_full = kwargs.get('skip_update_traffic_to_full', None)
self.enable_streaming_response = kwargs.get('enable_streaming_response', None)
self.use_flow_snapshot_to_deploy = kwargs.get('use_flow_snapshot_to_deploy', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.auto_grant_connection_permission = kwargs.get('auto_grant_connection_permission', None)
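# Illustrative sketch (not generated): a minimal deployment request. The
# endpoint and deployment names, instance SKU, and count are placeholders;
# enum fields use the string values documented above.
#
#     deploy_request = DeployFlowRequest(
#         source_flow_run_id='<flow-run-id>',
#         endpoint_name='my-flow-endpoint',
#         deployment_name='blue',
#         auth_mode='Key',
#         instance_type='<instance-sku>',
#         instance_count=1,
#     )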
class DeploymentInfo(msrest.serialization.Model):
"""DeploymentInfo.
:ivar operation_id:
:vartype operation_id: str
:ivar service_id:
:vartype service_id: str
:ivar service_name:
:vartype service_name: str
:ivar status_detail:
:vartype status_detail: str
"""
_attribute_map = {
'operation_id': {'key': 'operationId', 'type': 'str'},
'service_id': {'key': 'serviceId', 'type': 'str'},
'service_name': {'key': 'serviceName', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword operation_id:
:paramtype operation_id: str
:keyword service_id:
:paramtype service_id: str
:keyword service_name:
:paramtype service_name: str
:keyword status_detail:
:paramtype status_detail: str
"""
super(DeploymentInfo, self).__init__(**kwargs)
self.operation_id = kwargs.get('operation_id', None)
self.service_id = kwargs.get('service_id', None)
self.service_name = kwargs.get('service_name', None)
self.status_detail = kwargs.get('status_detail', None)
class DistributionConfiguration(msrest.serialization.Model):
"""DistributionConfiguration.
:ivar distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
:vartype distribution_type: str or ~flow.models.DistributionType
"""
_attribute_map = {
'distribution_type': {'key': 'distributionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
:paramtype distribution_type: str or ~flow.models.DistributionType
"""
super(DistributionConfiguration, self).__init__(**kwargs)
self.distribution_type = kwargs.get('distribution_type', None)
class DistributionParameter(msrest.serialization.Model):
"""DistributionParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar description:
:vartype description: str
:ivar input_type: Possible values include: "Text", "Number".
:vartype input_type: str or ~flow.models.DistributionParameterEnum
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'input_type': {'key': 'inputType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword description:
:paramtype description: str
:keyword input_type: Possible values include: "Text", "Number".
:paramtype input_type: str or ~flow.models.DistributionParameterEnum
"""
super(DistributionParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.description = kwargs.get('description', None)
self.input_type = kwargs.get('input_type', None)
class DockerBuildContext(msrest.serialization.Model):
"""DockerBuildContext.
:ivar location_type: Possible values include: "Git", "StorageAccount".
:vartype location_type: str or ~flow.models.BuildContextLocationType
:ivar location:
:vartype location: str
:ivar dockerfile_path:
:vartype dockerfile_path: str
"""
_attribute_map = {
'location_type': {'key': 'locationType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword location_type: Possible values include: "Git", "StorageAccount".
:paramtype location_type: str or ~flow.models.BuildContextLocationType
:keyword location:
:paramtype location: str
:keyword dockerfile_path:
:paramtype dockerfile_path: str
"""
super(DockerBuildContext, self).__init__(**kwargs)
self.location_type = kwargs.get('location_type', None)
self.location = kwargs.get('location', None)
self.dockerfile_path = kwargs.get('dockerfile_path', 'Dockerfile')
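# Illustrative note (hand-written): unlike most models in this module,
# DockerBuildContext applies a default when the kwarg is omitted.
#
#     context = DockerBuildContext(location_type='Git', location='<repo-url>')
#     assert context.dockerfile_path == 'Dockerfile'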
class DockerConfiguration(msrest.serialization.Model):
"""DockerConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword arguments:
:paramtype arguments: list[str]
"""
super(DockerConfiguration, self).__init__(**kwargs)
self.use_docker = kwargs.get('use_docker', None)
self.shared_volumes = kwargs.get('shared_volumes', None)
self.arguments = kwargs.get('arguments', None)
class DockerImagePlatform(msrest.serialization.Model):
"""DockerImagePlatform.
:ivar os:
:vartype os: str
:ivar architecture:
:vartype architecture: str
"""
_attribute_map = {
'os': {'key': 'os', 'type': 'str'},
'architecture': {'key': 'architecture', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword os:
:paramtype os: str
:keyword architecture:
:paramtype architecture: str
"""
super(DockerImagePlatform, self).__init__(**kwargs)
self.os = kwargs.get('os', None)
self.architecture = kwargs.get('architecture', None)
class DockerSection(msrest.serialization.Model):
"""DockerSection.
:ivar base_image:
:vartype base_image: str
:ivar platform:
:vartype platform: ~flow.models.DockerImagePlatform
:ivar base_dockerfile:
:vartype base_dockerfile: str
:ivar build_context:
:vartype build_context: ~flow.models.DockerBuildContext
:ivar base_image_registry:
:vartype base_image_registry: ~flow.models.ContainerRegistry
"""
_attribute_map = {
'base_image': {'key': 'baseImage', 'type': 'str'},
'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
'build_context': {'key': 'buildContext', 'type': 'DockerBuildContext'},
'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
}
def __init__(
self,
**kwargs
):
"""
:keyword base_image:
:paramtype base_image: str
:keyword platform:
:paramtype platform: ~flow.models.DockerImagePlatform
:keyword base_dockerfile:
:paramtype base_dockerfile: str
:keyword build_context:
:paramtype build_context: ~flow.models.DockerBuildContext
:keyword base_image_registry:
:paramtype base_image_registry: ~flow.models.ContainerRegistry
"""
super(DockerSection, self).__init__(**kwargs)
self.base_image = kwargs.get('base_image', None)
self.platform = kwargs.get('platform', None)
self.base_dockerfile = kwargs.get('base_dockerfile', None)
self.build_context = kwargs.get('build_context', None)
self.base_image_registry = kwargs.get('base_image_registry', None)
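# Illustrative sketch (not generated): a DockerSection pairing a base image
# with an explicit platform. The image reference and the os/architecture
# strings are examples only.
#
#     docker_section = DockerSection(
#         base_image='<registry>/<image>:<tag>',
#         platform=DockerImagePlatform(os='Linux', architecture='amd64'),
#     )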
class DockerSettingConfiguration(msrest.serialization.Model):
"""DockerSettingConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar shm_size:
:vartype shm_size: str
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword shm_size:
:paramtype shm_size: str
:keyword arguments:
:paramtype arguments: list[str]
"""
super(DockerSettingConfiguration, self).__init__(**kwargs)
self.use_docker = kwargs.get('use_docker', None)
self.shared_volumes = kwargs.get('shared_volumes', None)
self.shm_size = kwargs.get('shm_size', None)
self.arguments = kwargs.get('arguments', None)
class DoWhileControlFlowInfo(msrest.serialization.Model):
"""DoWhileControlFlowInfo.
:ivar output_port_name_to_input_port_names_mapping: Dictionary mapping each
 output port name to the list of input port names it maps to.
:vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:ivar condition_output_port_name:
:vartype condition_output_port_name: str
:ivar run_settings:
:vartype run_settings: ~flow.models.DoWhileControlFlowRunSettings
"""
_attribute_map = {
'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': 'DoWhileControlFlowRunSettings'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_port_name_to_input_port_names_mapping: Dictionary mapping each
 output port name to the list of input port names it maps to.
:paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:keyword condition_output_port_name:
:paramtype condition_output_port_name: str
:keyword run_settings:
:paramtype run_settings: ~flow.models.DoWhileControlFlowRunSettings
"""
super(DoWhileControlFlowInfo, self).__init__(**kwargs)
self.output_port_name_to_input_port_names_mapping = kwargs.get('output_port_name_to_input_port_names_mapping', None)
self.condition_output_port_name = kwargs.get('condition_output_port_name', None)
self.run_settings = kwargs.get('run_settings', None)
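# Illustrative sketch (not generated): wiring a do-while loop where the
# 'output' port feeds the 'input' port on the next iteration; the port names
# are placeholders.
#
#     do_while_info = DoWhileControlFlowInfo(
#         output_port_name_to_input_port_names_mapping={'output': ['input']},
#         condition_output_port_name='condition',
#     )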
class DoWhileControlFlowRunSettings(msrest.serialization.Model):
"""DoWhileControlFlowRunSettings.
:ivar max_loop_iteration_count:
:vartype max_loop_iteration_count: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'ParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_loop_iteration_count:
:paramtype max_loop_iteration_count: ~flow.models.ParameterAssignment
"""
super(DoWhileControlFlowRunSettings, self).__init__(**kwargs)
self.max_loop_iteration_count = kwargs.get('max_loop_iteration_count', None)
class DownloadResourceInfo(msrest.serialization.Model):
"""DownloadResourceInfo.
:ivar download_url:
:vartype download_url: str
:ivar size:
:vartype size: long
"""
_attribute_map = {
'download_url': {'key': 'downloadUrl', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword download_url:
:paramtype download_url: str
:keyword size:
:paramtype size: long
"""
super(DownloadResourceInfo, self).__init__(**kwargs)
self.download_url = kwargs.get('download_url', None)
self.size = kwargs.get('size', None)
class EndpointSetting(msrest.serialization.Model):
"""EndpointSetting.
:ivar type:
:vartype type: str
:ivar port:
:vartype port: int
:ivar ssl_thumbprint:
:vartype ssl_thumbprint: str
:ivar endpoint:
:vartype endpoint: str
:ivar proxy_endpoint:
:vartype proxy_endpoint: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar enabled:
:vartype enabled: bool
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'ssl_thumbprint': {'key': 'sslThumbprint', 'type': 'str'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'proxy_endpoint': {'key': 'proxyEndpoint', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword port:
:paramtype port: int
:keyword ssl_thumbprint:
:paramtype ssl_thumbprint: str
:keyword endpoint:
:paramtype endpoint: str
:keyword proxy_endpoint:
:paramtype proxy_endpoint: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword enabled:
:paramtype enabled: bool
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: str
"""
super(EndpointSetting, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.ssl_thumbprint = kwargs.get('ssl_thumbprint', None)
self.endpoint = kwargs.get('endpoint', None)
self.proxy_endpoint = kwargs.get('proxy_endpoint', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.enabled = kwargs.get('enabled', None)
self.properties = kwargs.get('properties', None)
self.nodes = kwargs.get('nodes', None)
class EntityInterface(msrest.serialization.Model):
"""EntityInterface.
:ivar parameters:
:vartype parameters: list[~flow.models.Parameter]
:ivar ports:
:vartype ports: ~flow.models.NodePortInterface
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.Parameter]
:ivar data_path_parameters:
:vartype data_path_parameters: list[~flow.models.DataPathParameter]
:ivar data_path_parameter_list:
:vartype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
:ivar asset_output_settings_parameter_list:
:vartype asset_output_settings_parameter_list: list[~flow.models.AssetOutputSettingsParameter]
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '[Parameter]'},
'ports': {'key': 'ports', 'type': 'NodePortInterface'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[Parameter]'},
'data_path_parameters': {'key': 'dataPathParameters', 'type': '[DataPathParameter]'},
'data_path_parameter_list': {'key': 'dataPathParameterList', 'type': '[DataSetPathParameter]'},
'asset_output_settings_parameter_list': {'key': 'AssetOutputSettingsParameterList', 'type': '[AssetOutputSettingsParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parameters:
:paramtype parameters: list[~flow.models.Parameter]
:keyword ports:
:paramtype ports: ~flow.models.NodePortInterface
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.Parameter]
:keyword data_path_parameters:
:paramtype data_path_parameters: list[~flow.models.DataPathParameter]
:keyword data_path_parameter_list:
:paramtype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
:keyword asset_output_settings_parameter_list:
:paramtype asset_output_settings_parameter_list:
list[~flow.models.AssetOutputSettingsParameter]
"""
super(EntityInterface, self).__init__(**kwargs)
self.parameters = kwargs.get('parameters', None)
self.ports = kwargs.get('ports', None)
self.metadata_parameters = kwargs.get('metadata_parameters', None)
self.data_path_parameters = kwargs.get('data_path_parameters', None)
self.data_path_parameter_list = kwargs.get('data_path_parameter_list', None)
self.asset_output_settings_parameter_list = kwargs.get('asset_output_settings_parameter_list', None)
class EntrySetting(msrest.serialization.Model):
"""EntrySetting.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(EntrySetting, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
class EnumParameterRule(msrest.serialization.Model):
"""EnumParameterRule.
:ivar valid_values:
:vartype valid_values: list[str]
"""
_attribute_map = {
'valid_values': {'key': 'validValues', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword valid_values:
:paramtype valid_values: list[str]
"""
super(EnumParameterRule, self).__init__(**kwargs)
self.valid_values = kwargs.get('valid_values', None)
class EnvironmentConfiguration(msrest.serialization.Model):
"""EnvironmentConfiguration.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar use_environment_definition:
:vartype use_environment_definition: bool
:ivar environment_definition_string:
:vartype environment_definition_string: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword use_environment_definition:
:paramtype use_environment_definition: bool
:keyword environment_definition_string:
:paramtype environment_definition_string: str
"""
super(EnvironmentConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.use_environment_definition = kwargs.get('use_environment_definition', None)
self.environment_definition_string = kwargs.get('environment_definition_string', None)
class EnvironmentDefinition(msrest.serialization.Model):
"""EnvironmentDefinition.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar asset_id:
:vartype asset_id: str
:ivar auto_rebuild:
:vartype auto_rebuild: bool
:ivar python:
:vartype python: ~flow.models.PythonSection
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar docker:
:vartype docker: ~flow.models.DockerSection
:ivar spark:
:vartype spark: ~flow.models.SparkSection
:ivar r:
:vartype r: ~flow.models.RSection
:ivar inferencing_stack_version:
:vartype inferencing_stack_version: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'auto_rebuild': {'key': 'autoRebuild', 'type': 'bool'},
'python': {'key': 'python', 'type': 'PythonSection'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'docker': {'key': 'docker', 'type': 'DockerSection'},
'spark': {'key': 'spark', 'type': 'SparkSection'},
'r': {'key': 'r', 'type': 'RSection'},
'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword asset_id:
:paramtype asset_id: str
:keyword auto_rebuild:
:paramtype auto_rebuild: bool
:keyword python:
:paramtype python: ~flow.models.PythonSection
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword docker:
:paramtype docker: ~flow.models.DockerSection
:keyword spark:
:paramtype spark: ~flow.models.SparkSection
:keyword r:
:paramtype r: ~flow.models.RSection
:keyword inferencing_stack_version:
:paramtype inferencing_stack_version: str
"""
super(EnvironmentDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.asset_id = kwargs.get('asset_id', None)
self.auto_rebuild = kwargs.get('auto_rebuild', None)
self.python = kwargs.get('python', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.docker = kwargs.get('docker', None)
self.spark = kwargs.get('spark', None)
self.r = kwargs.get('r', None)
self.inferencing_stack_version = kwargs.get('inferencing_stack_version', None)
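# Illustrative sketch (not generated): an EnvironmentDefinition combining a
# docker section with environment variables. The names and values are
# placeholders.
#
#     env = EnvironmentDefinition(
#         name='my-flow-env',
#         version='1',
#         environment_variables={'LOG_LEVEL': 'INFO'},
#         docker=DockerSection(base_image='<base-image>'),
#     )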
class EnvironmentDefinitionDto(msrest.serialization.Model):
"""EnvironmentDefinitionDto.
:ivar environment_name:
:vartype environment_name: str
:ivar environment_version:
:vartype environment_version: str
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'environment_name': {'key': 'environmentName', 'type': 'str'},
'environment_version': {'key': 'environmentVersion', 'type': 'str'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword environment_name:
:paramtype environment_name: str
:keyword environment_version:
:paramtype environment_version: str
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(EnvironmentDefinitionDto, self).__init__(**kwargs)
self.environment_name = kwargs.get('environment_name', None)
self.environment_version = kwargs.get('environment_version', None)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class EPRPipelineRunErrorClassificationRequest(msrest.serialization.Model):
"""EPRPipelineRunErrorClassificationRequest.
:ivar root_run_id:
:vartype root_run_id: str
:ivar run_id:
:vartype run_id: str
:ivar task_result:
:vartype task_result: str
:ivar failure_type:
:vartype failure_type: str
:ivar failure_name:
:vartype failure_name: str
:ivar responsible_team:
:vartype responsible_team: str
"""
_attribute_map = {
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'task_result': {'key': 'taskResult', 'type': 'str'},
'failure_type': {'key': 'failureType', 'type': 'str'},
'failure_name': {'key': 'failureName', 'type': 'str'},
'responsible_team': {'key': 'responsibleTeam', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword root_run_id:
:paramtype root_run_id: str
:keyword run_id:
:paramtype run_id: str
:keyword task_result:
:paramtype task_result: str
:keyword failure_type:
:paramtype failure_type: str
:keyword failure_name:
:paramtype failure_name: str
:keyword responsible_team:
:paramtype responsible_team: str
"""
super(EPRPipelineRunErrorClassificationRequest, self).__init__(**kwargs)
self.root_run_id = kwargs.get('root_run_id', None)
self.run_id = kwargs.get('run_id', None)
self.task_result = kwargs.get('task_result', None)
self.failure_type = kwargs.get('failure_type', None)
self.failure_name = kwargs.get('failure_name', None)
self.responsible_team = kwargs.get('responsible_team', None)
class ErrorAdditionalInfo(msrest.serialization.Model):
"""The resource management error additional info.
:ivar type: The additional info type.
:vartype type: str
:ivar info: The additional info.
:vartype info: any
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: The additional info type.
:paramtype type: str
:keyword info: The additional info.
:paramtype info: any
"""
super(ErrorAdditionalInfo, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.info = kwargs.get('info', None)
class ErrorResponse(msrest.serialization.Model):
"""The error response.
:ivar error: The root error.
:vartype error: ~flow.models.RootError
:ivar correlation: Dictionary containing correlation details for the error.
:vartype correlation: dict[str, str]
:ivar environment: The hosting environment.
:vartype environment: str
:ivar location: The Azure region.
:vartype location: str
:ivar time: The time in UTC.
:vartype time: ~datetime.datetime
:ivar component_name: Component name where error originated/encountered.
:vartype component_name: str
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'RootError'},
'correlation': {'key': 'correlation', 'type': '{str}'},
'environment': {'key': 'environment', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'component_name': {'key': 'componentName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error: The root error.
:paramtype error: ~flow.models.RootError
:keyword correlation: Dictionary containing correlation details for the error.
:paramtype correlation: dict[str, str]
:keyword environment: The hosting environment.
:paramtype environment: str
:keyword location: The Azure region.
:paramtype location: str
:keyword time: The time in UTC.
:paramtype time: ~datetime.datetime
:keyword component_name: Component name where error originated/encountered.
:paramtype component_name: str
"""
super(ErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
self.correlation = kwargs.get('correlation', None)
self.environment = kwargs.get('environment', None)
self.location = kwargs.get('location', None)
self.time = kwargs.get('time', None)
self.component_name = kwargs.get('component_name', None)
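# Illustrative sketch (hand-written helper, not part of the generated client):
# ErrorResponse is the model deserialized from non-success responses, so
# callers typically only read it. Which correlation keys the service actually
# populates is an assumption here.
#
#     def summarize_error(response):
#         correlation = response.correlation or {}
#         return '{} @ {} ({})'.format(
#             response.error, response.time, correlation.get('request-id', 'n/a'))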
class EsCloudConfiguration(msrest.serialization.Model):
"""EsCloudConfiguration.
:ivar enable_output_to_file_based_on_data_type_id:
:vartype enable_output_to_file_based_on_data_type_id: bool
:ivar environment:
:vartype environment: ~flow.models.EnvironmentConfiguration
:ivar hyper_drive_configuration:
:vartype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
:ivar k8_s_config:
:vartype k8_s_config: ~flow.models.K8SConfiguration
:ivar resource_config:
:vartype resource_config: ~flow.models.AEVAResourceConfiguration
:ivar torch_distributed_config:
:vartype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
:ivar target_selector_config:
:vartype target_selector_config: ~flow.models.TargetSelectorConfiguration
:ivar docker_config:
:vartype docker_config: ~flow.models.DockerSettingConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: int
:ivar identity:
:vartype identity: ~flow.models.IdentitySetting
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar run_config:
:vartype run_config: str
"""
_attribute_map = {
'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
'environment': {'key': 'environment', 'type': 'EnvironmentConfiguration'},
'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'HyperDriveConfiguration'},
'k8_s_config': {'key': 'k8sConfig', 'type': 'K8SConfiguration'},
'resource_config': {'key': 'resourceConfig', 'type': 'AEVAResourceConfiguration'},
'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'TorchDistributedConfiguration'},
'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'TargetSelectorConfiguration'},
'docker_config': {'key': 'dockerConfig', 'type': 'DockerSettingConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'IdentitySetting'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'run_config': {'key': 'runConfig', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword enable_output_to_file_based_on_data_type_id:
:paramtype enable_output_to_file_based_on_data_type_id: bool
:keyword environment:
:paramtype environment: ~flow.models.EnvironmentConfiguration
:keyword hyper_drive_configuration:
:paramtype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
:keyword k8_s_config:
:paramtype k8_s_config: ~flow.models.K8SConfiguration
:keyword resource_config:
:paramtype resource_config: ~flow.models.AEVAResourceConfiguration
:keyword torch_distributed_config:
:paramtype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
:keyword target_selector_config:
:paramtype target_selector_config: ~flow.models.TargetSelectorConfiguration
:keyword docker_config:
:paramtype docker_config: ~flow.models.DockerSettingConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: int
:keyword identity:
:paramtype identity: ~flow.models.IdentitySetting
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword run_config:
:paramtype run_config: str
"""
super(EsCloudConfiguration, self).__init__(**kwargs)
self.enable_output_to_file_based_on_data_type_id = kwargs.get('enable_output_to_file_based_on_data_type_id', None)
self.environment = kwargs.get('environment', None)
self.hyper_drive_configuration = kwargs.get('hyper_drive_configuration', None)
self.k8_s_config = kwargs.get('k8_s_config', None)
self.resource_config = kwargs.get('resource_config', None)
self.torch_distributed_config = kwargs.get('torch_distributed_config', None)
self.target_selector_config = kwargs.get('target_selector_config', None)
self.docker_config = kwargs.get('docker_config', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
self.identity = kwargs.get('identity', None)
self.application_endpoints = kwargs.get('application_endpoints', None)
self.run_config = kwargs.get('run_config', None)
class EvaluationFlowRunSettings(msrest.serialization.Model):
"""EvaluationFlowRunSettings.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(EvaluationFlowRunSettings, self).__init__(**kwargs)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.connection_overrides = kwargs.get('connection_overrides', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.properties = kwargs.get('properties', None)
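# Usage sketch: these generated models accept plain keyword arguments, so
# evaluation run settings can be assembled directly. The mapping expression
# and runtime name below are illustrative assumptions, not fixed values:
#
#     settings = EvaluationFlowRunSettings(
#         flow_run_display_name="eval-accuracy",
#         inputs_mapping={"groundtruth": "${data.answer}"},  # hypothetical column
#         runtime_name="example-runtime",                    # hypothetical runtime
#     )
#     payload = settings.serialize()  # camelCase wire keys per _attribute_map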
class ExampleRequest(msrest.serialization.Model):
"""ExampleRequest.
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, list[list[any]]]
:ivar global_parameters: This is a dictionary.
:vartype global_parameters: dict[str, any]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{[[object]]}'},
'global_parameters': {'key': 'globalParameters', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, list[list[any]]]
:keyword global_parameters: This is a dictionary.
:paramtype global_parameters: dict[str, any]
"""
super(ExampleRequest, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.global_parameters = kwargs.get('global_parameters', None)
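# Usage sketch: ``inputs`` maps each input name to a list of rows, each row
# itself a list of values (dict[str, list[list[any]]]). Values are illustrative:
#
#     req = ExampleRequest(
#         inputs={"question": [["What is a runtime?"], ["How are flows run?"]]},
#         global_parameters={"temperature": 0.2},  # hypothetical parameter
#     )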
class ExecutionContextDto(msrest.serialization.Model):
"""ExecutionContextDto.
:ivar executable:
:vartype executable: str
:ivar user_code:
:vartype user_code: str
:ivar arguments:
:vartype arguments: str
"""
_attribute_map = {
'executable': {'key': 'executable', 'type': 'str'},
'user_code': {'key': 'userCode', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword executable:
:paramtype executable: str
:keyword user_code:
:paramtype user_code: str
:keyword arguments:
:paramtype arguments: str
"""
super(ExecutionContextDto, self).__init__(**kwargs)
self.executable = kwargs.get('executable', None)
self.user_code = kwargs.get('user_code', None)
self.arguments = kwargs.get('arguments', None)
class ExecutionDataLocation(msrest.serialization.Model):
"""ExecutionDataLocation.
:ivar dataset:
:vartype dataset: ~flow.models.RunDatasetReference
:ivar data_path:
:vartype data_path: ~flow.models.ExecutionDataPath
:ivar uri:
:vartype uri: ~flow.models.UriReference
:ivar type:
:vartype type: str
"""
_attribute_map = {
'dataset': {'key': 'dataset', 'type': 'RunDatasetReference'},
'data_path': {'key': 'dataPath', 'type': 'ExecutionDataPath'},
'uri': {'key': 'uri', 'type': 'UriReference'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset:
:paramtype dataset: ~flow.models.RunDatasetReference
:keyword data_path:
:paramtype data_path: ~flow.models.ExecutionDataPath
:keyword uri:
:paramtype uri: ~flow.models.UriReference
:keyword type:
:paramtype type: str
"""
super(ExecutionDataLocation, self).__init__(**kwargs)
self.dataset = kwargs.get('dataset', None)
self.data_path = kwargs.get('data_path', None)
self.uri = kwargs.get('uri', None)
self.type = kwargs.get('type', None)
class ExecutionDataPath(msrest.serialization.Model):
"""ExecutionDataPath.
:ivar datastore_name:
:vartype datastore_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'datastore_name': {'key': 'datastoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datastore_name:
:paramtype datastore_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(ExecutionDataPath, self).__init__(**kwargs)
self.datastore_name = kwargs.get('datastore_name', None)
self.relative_path = kwargs.get('relative_path', None)
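# Usage sketch: an ExecutionDataLocation can reference a datastore-relative
# path through ExecutionDataPath. The datastore name and discriminator value
# below are assumptions for illustration:
#
#     location = ExecutionDataLocation(
#         data_path=ExecutionDataPath(
#             datastore_name="workspaceblobstore",
#             relative_path="flows/inputs/data.jsonl",
#         ),
#         type="DataPath",  # hypothetical type discriminator
#     )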
class ExecutionGlobsOptions(msrest.serialization.Model):
"""ExecutionGlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(ExecutionGlobsOptions, self).__init__(**kwargs)
self.glob_patterns = kwargs.get('glob_patterns', None)
class ExperimentComputeMetaInfo(msrest.serialization.Model):
"""ExperimentComputeMetaInfo.
:ivar current_node_count:
:vartype current_node_count: int
:ivar target_node_count:
:vartype target_node_count: int
:ivar max_node_count:
:vartype max_node_count: int
:ivar min_node_count:
:vartype min_node_count: int
:ivar idle_node_count:
:vartype idle_node_count: int
:ivar running_node_count:
:vartype running_node_count: int
:ivar preparing_node_count:
:vartype preparing_node_count: int
:ivar unusable_node_count:
:vartype unusable_node_count: int
:ivar leaving_node_count:
:vartype leaving_node_count: int
:ivar preempted_node_count:
:vartype preempted_node_count: int
:ivar vm_size:
:vartype vm_size: str
:ivar location:
:vartype location: str
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar state:
:vartype state: str
:ivar os_type:
:vartype os_type: str
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword current_node_count:
:paramtype current_node_count: int
:keyword target_node_count:
:paramtype target_node_count: int
:keyword max_node_count:
:paramtype max_node_count: int
:keyword min_node_count:
:paramtype min_node_count: int
:keyword idle_node_count:
:paramtype idle_node_count: int
:keyword running_node_count:
:paramtype running_node_count: int
:keyword preparing_node_count:
:paramtype preparing_node_count: int
:keyword unusable_node_count:
:paramtype unusable_node_count: int
:keyword leaving_node_count:
:paramtype leaving_node_count: int
:keyword preempted_node_count:
:paramtype preempted_node_count: int
:keyword vm_size:
:paramtype vm_size: str
:keyword location:
:paramtype location: str
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword state:
:paramtype state: str
:keyword os_type:
:paramtype os_type: str
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(ExperimentComputeMetaInfo, self).__init__(**kwargs)
self.current_node_count = kwargs.get('current_node_count', None)
self.target_node_count = kwargs.get('target_node_count', None)
self.max_node_count = kwargs.get('max_node_count', None)
self.min_node_count = kwargs.get('min_node_count', None)
self.idle_node_count = kwargs.get('idle_node_count', None)
self.running_node_count = kwargs.get('running_node_count', None)
self.preparing_node_count = kwargs.get('preparing_node_count', None)
self.unusable_node_count = kwargs.get('unusable_node_count', None)
self.leaving_node_count = kwargs.get('leaving_node_count', None)
self.preempted_node_count = kwargs.get('preempted_node_count', None)
self.vm_size = kwargs.get('vm_size', None)
self.location = kwargs.get('location', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.state = kwargs.get('state', None)
self.os_type = kwargs.get('os_type', None)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.created_by_studio = kwargs.get('created_by_studio', None)
self.is_gpu_type = kwargs.get('is_gpu_type', None)
self.resource_id = kwargs.get('resource_id', None)
self.compute_type = kwargs.get('compute_type', None)
class ExperimentInfo(msrest.serialization.Model):
"""ExperimentInfo.
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(ExperimentInfo, self).__init__(**kwargs)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
class ExportComponentMetaInfo(msrest.serialization.Model):
"""ExportComponentMetaInfo.
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar module_version:
:vartype module_version: str
:ivar is_anonymous:
:vartype is_anonymous: bool
"""
_attribute_map = {
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword module_version:
:paramtype module_version: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
"""
super(ExportComponentMetaInfo, self).__init__(**kwargs)
self.module_entity = kwargs.get('module_entity', None)
self.module_version = kwargs.get('module_version', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
class ExportDataTask(msrest.serialization.Model):
"""ExportDataTask.
:ivar data_transfer_sink:
:vartype data_transfer_sink: ~flow.models.DataTransferSink
"""
_attribute_map = {
'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'DataTransferSink'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_sink:
:paramtype data_transfer_sink: ~flow.models.DataTransferSink
"""
super(ExportDataTask, self).__init__(**kwargs)
self.data_transfer_sink = kwargs.get('data_transfer_sink', None)
class FeaturizationSettings(msrest.serialization.Model):
"""FeaturizationSettings.
:ivar mode: Possible values include: "Auto", "Custom", "Off".
:vartype mode: str or ~flow.models.FeaturizationMode
:ivar blocked_transformers:
:vartype blocked_transformers: list[str]
:ivar column_purposes: Dictionary of :code:`<string>`.
:vartype column_purposes: dict[str, str]
:ivar drop_columns:
:vartype drop_columns: list[str]
:ivar transformer_params: This is a dictionary of column transformer lists, keyed by transformer name.
:vartype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
:ivar dataset_language:
:vartype dataset_language: str
:ivar enable_dnn_featurization:
:vartype enable_dnn_featurization: bool
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'},
'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom", "Off".
:paramtype mode: str or ~flow.models.FeaturizationMode
:keyword blocked_transformers:
:paramtype blocked_transformers: list[str]
:keyword column_purposes: Dictionary of :code:`<string>`.
:paramtype column_purposes: dict[str, str]
:keyword drop_columns:
:paramtype drop_columns: list[str]
:keyword transformer_params: This is a dictionary of column transformer lists, keyed by transformer name.
:paramtype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
:keyword dataset_language:
:paramtype dataset_language: str
:keyword enable_dnn_featurization:
:paramtype enable_dnn_featurization: bool
"""
super(FeaturizationSettings, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.blocked_transformers = kwargs.get('blocked_transformers', None)
self.column_purposes = kwargs.get('column_purposes', None)
self.drop_columns = kwargs.get('drop_columns', None)
self.transformer_params = kwargs.get('transformer_params', None)
self.dataset_language = kwargs.get('dataset_language', None)
self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', None)
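# Usage sketch: a custom featurization configuration. The purpose string and
# transformer name are illustrative; transformer_params (see docstring above)
# would map a transformer name to a list of ColumnTransformer models:
#
#     featurization = FeaturizationSettings(
#         mode="Custom",
#         blocked_transformers=["LabelEncoder"],    # hypothetical transformer
#         column_purposes={"review_text": "Text"},  # hypothetical purpose value
#         drop_columns=["row_id"],
#     )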
class FeedDto(msrest.serialization.Model):
"""FeedDto.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar sharing_scopes:
:vartype sharing_scopes: list[~flow.models.SharingScope]
:ivar supported_asset_types:
:vartype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
:ivar regional_workspace_storage: This is a dictionary.
:vartype regional_workspace_storage: dict[str, list[str]]
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'sharing_scopes': {'key': 'sharingScopes', 'type': '[SharingScope]'},
'supported_asset_types': {'key': 'supportedAssetTypes', 'type': 'FeedDtoSupportedAssetTypes'},
'regional_workspace_storage': {'key': 'regionalWorkspaceStorage', 'type': '{[str]}'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword sharing_scopes:
:paramtype sharing_scopes: list[~flow.models.SharingScope]
:keyword supported_asset_types:
:paramtype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
:keyword regional_workspace_storage: This is a dictionary.
:paramtype regional_workspace_storage: dict[str, list[str]]
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(FeedDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.sharing_scopes = kwargs.get('sharing_scopes', None)
self.supported_asset_types = kwargs.get('supported_asset_types', None)
self.regional_workspace_storage = kwargs.get('regional_workspace_storage', None)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class FeedDtoSupportedAssetTypes(msrest.serialization.Model):
"""FeedDtoSupportedAssetTypes.
:ivar component:
:vartype component: ~flow.models.AssetTypeMetaInfo
:ivar model:
:vartype model: ~flow.models.AssetTypeMetaInfo
:ivar environment:
:vartype environment: ~flow.models.AssetTypeMetaInfo
:ivar dataset:
:vartype dataset: ~flow.models.AssetTypeMetaInfo
:ivar data_store:
:vartype data_store: ~flow.models.AssetTypeMetaInfo
:ivar sample_graph:
:vartype sample_graph: ~flow.models.AssetTypeMetaInfo
:ivar flow_tool:
:vartype flow_tool: ~flow.models.AssetTypeMetaInfo
:ivar flow_tool_setting:
:vartype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
:ivar flow_connection:
:vartype flow_connection: ~flow.models.AssetTypeMetaInfo
:ivar flow_sample:
:vartype flow_sample: ~flow.models.AssetTypeMetaInfo
:ivar flow_runtime_spec:
:vartype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
"""
_attribute_map = {
'component': {'key': 'Component', 'type': 'AssetTypeMetaInfo'},
'model': {'key': 'Model', 'type': 'AssetTypeMetaInfo'},
'environment': {'key': 'Environment', 'type': 'AssetTypeMetaInfo'},
'dataset': {'key': 'Dataset', 'type': 'AssetTypeMetaInfo'},
'data_store': {'key': 'DataStore', 'type': 'AssetTypeMetaInfo'},
'sample_graph': {'key': 'SampleGraph', 'type': 'AssetTypeMetaInfo'},
'flow_tool': {'key': 'FlowTool', 'type': 'AssetTypeMetaInfo'},
'flow_tool_setting': {'key': 'FlowToolSetting', 'type': 'AssetTypeMetaInfo'},
'flow_connection': {'key': 'FlowConnection', 'type': 'AssetTypeMetaInfo'},
'flow_sample': {'key': 'FlowSample', 'type': 'AssetTypeMetaInfo'},
'flow_runtime_spec': {'key': 'FlowRuntimeSpec', 'type': 'AssetTypeMetaInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component:
:paramtype component: ~flow.models.AssetTypeMetaInfo
:keyword model:
:paramtype model: ~flow.models.AssetTypeMetaInfo
:keyword environment:
:paramtype environment: ~flow.models.AssetTypeMetaInfo
:keyword dataset:
:paramtype dataset: ~flow.models.AssetTypeMetaInfo
:keyword data_store:
:paramtype data_store: ~flow.models.AssetTypeMetaInfo
:keyword sample_graph:
:paramtype sample_graph: ~flow.models.AssetTypeMetaInfo
:keyword flow_tool:
:paramtype flow_tool: ~flow.models.AssetTypeMetaInfo
:keyword flow_tool_setting:
:paramtype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
:keyword flow_connection:
:paramtype flow_connection: ~flow.models.AssetTypeMetaInfo
:keyword flow_sample:
:paramtype flow_sample: ~flow.models.AssetTypeMetaInfo
:keyword flow_runtime_spec:
:paramtype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
"""
super(FeedDtoSupportedAssetTypes, self).__init__(**kwargs)
self.component = kwargs.get('component', None)
self.model = kwargs.get('model', None)
self.environment = kwargs.get('environment', None)
self.dataset = kwargs.get('dataset', None)
self.data_store = kwargs.get('data_store', None)
self.sample_graph = kwargs.get('sample_graph', None)
self.flow_tool = kwargs.get('flow_tool', None)
self.flow_tool_setting = kwargs.get('flow_tool_setting', None)
self.flow_connection = kwargs.get('flow_connection', None)
self.flow_sample = kwargs.get('flow_sample', None)
self.flow_runtime_spec = kwargs.get('flow_runtime_spec', None)
class FileSystem(msrest.serialization.Model):
"""FileSystem.
:ivar connection:
:vartype connection: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword path:
:paramtype path: str
"""
super(FileSystem, self).__init__(**kwargs)
self.connection = kwargs.get('connection', None)
self.path = kwargs.get('path', None)
class Flow(msrest.serialization.Model):
"""Flow.
:ivar source_resource_id:
:vartype source_resource_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar node_variants: This is a dictionary.
:vartype node_variants: dict[str, ~flow.models.NodeVariant]
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar bulk_test_data: This is a dictionary.
:vartype bulk_test_data: dict[str, str]
:ivar evaluation_flows: This is a dictionary.
:vartype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
"""
_attribute_map = {
'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'node_variants': {'key': 'nodeVariants', 'type': '{NodeVariant}'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'bulk_test_data': {'key': 'bulkTestData', 'type': '{str}'},
'evaluation_flows': {'key': 'evaluationFlows', 'type': '{FlowGraphReference}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_resource_id:
:paramtype source_resource_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword node_variants: This is a dictionary.
:paramtype node_variants: dict[str, ~flow.models.NodeVariant]
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword bulk_test_data: This is a dictionary.
:paramtype bulk_test_data: dict[str, str]
:keyword evaluation_flows: This is a dictionary.
:paramtype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
"""
super(Flow, self).__init__(**kwargs)
self.source_resource_id = kwargs.get('source_resource_id', None)
self.flow_graph = kwargs.get('flow_graph', None)
self.node_variants = kwargs.get('node_variants', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.bulk_test_data = kwargs.get('bulk_test_data', None)
self.evaluation_flows = kwargs.get('evaluation_flows', None)
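# Usage sketch: a Flow bundles a FlowGraph with optional variant and
# evaluation dictionaries. The asset reference below is a hypothetical value:
#
#     flow = Flow(
#         flow_graph=FlowGraph(),  # populated as sketched after FlowGraph below
#         bulk_test_data={"dataset": "azureml:my-data:1"},  # hypothetical asset id
#     )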
class FlowAnnotations(msrest.serialization.Model):
"""FlowAnnotations.
:ivar flow_name:
:vartype flow_name: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar archived:
:vartype archived: bool
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'archived': {'key': 'archived', 'type': 'bool'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword archived:
:paramtype archived: bool
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(FlowAnnotations, self).__init__(**kwargs)
self.flow_name = kwargs.get('flow_name', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.owner = kwargs.get('owner', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.archived = kwargs.get('archived', None)
self.tags = kwargs.get('tags', None)
class FlowBaseDto(msrest.serialization.Model):
"""FlowBaseDto.
:ivar flow_id:
:vartype flow_id: str
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar experiment_id:
:vartype experiment_id: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar flow_resource_id:
:vartype flow_resource_id: str
:ivar is_archived:
:vartype is_archived: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_id:
:paramtype flow_id: str
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword experiment_id:
:paramtype experiment_id: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword flow_resource_id:
:paramtype flow_resource_id: str
:keyword is_archived:
:paramtype is_archived: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowBaseDto, self).__init__(**kwargs)
self.flow_id = kwargs.get('flow_id', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.flow_type = kwargs.get('flow_type', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.owner = kwargs.get('owner', None)
self.flow_resource_id = kwargs.get('flow_resource_id', None)
self.is_archived = kwargs.get('is_archived', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
class FlowDto(msrest.serialization.Model):
"""FlowDto.
:ivar timestamp:
:vartype timestamp: ~datetime.datetime
:ivar e_tag: Any object.
:vartype e_tag: any
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar flow_run_result:
:vartype flow_run_result: ~flow.models.FlowRunResult
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
:ivar flow_id:
:vartype flow_id: str
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar experiment_id:
:vartype experiment_id: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar flow_resource_id:
:vartype flow_resource_id: str
:ivar is_archived:
:vartype is_archived: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'e_tag': {'key': 'eTag', 'type': 'object'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword timestamp:
:paramtype timestamp: ~datetime.datetime
:keyword e_tag: Any object.
:paramtype e_tag: any
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword flow_run_result:
:paramtype flow_run_result: ~flow.models.FlowRunResult
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
:keyword flow_id:
:paramtype flow_id: str
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword experiment_id:
:paramtype experiment_id: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword flow_resource_id:
:paramtype flow_resource_id: str
:keyword is_archived:
:paramtype is_archived: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowDto, self).__init__(**kwargs)
self.timestamp = kwargs.get('timestamp', None)
self.e_tag = kwargs.get('e_tag', None)
self.flow = kwargs.get('flow', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.flow_run_result = kwargs.get('flow_run_result', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None)
self.flow_id = kwargs.get('flow_id', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.flow_type = kwargs.get('flow_type', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.owner = kwargs.get('owner', None)
self.flow_resource_id = kwargs.get('flow_resource_id', None)
self.is_archived = kwargs.get('is_archived', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
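# Note: FlowDto repeats the FlowBaseDto fields inline rather than inheriting
# them, so a service payload maps onto it in one step. A hedged sketch using
# msrest's from_dict helper, with hypothetical values:
#
#     dto = FlowDto.from_dict({"flowId": "1234", "flowType": "Chat"})
#     dto.flow_id, dto.flow_type  # -> "1234", "Chat"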
class FlowEnvironment(msrest.serialization.Model):
"""FlowEnvironment.
:ivar image:
:vartype image: str
:ivar python_requirements_txt:
:vartype python_requirements_txt: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'python_requirements_txt': {'key': 'python_requirements_txt', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword python_requirements_txt:
:paramtype python_requirements_txt: str
"""
super(FlowEnvironment, self).__init__(**kwargs)
self.image = kwargs.get('image', None)
self.python_requirements_txt = kwargs.get('python_requirements_txt', None)
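# Usage sketch: the environment pins a container image and an inline
# requirements.txt payload. Both values below are illustrative:
#
#     env = FlowEnvironment(
#         image="mcr.microsoft.com/azureml/promptflow/promptflow-runtime:latest",
#         python_requirements_txt="requests>=2.31\nlangchain==0.0.300",
#     )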
class FlowFeature(msrest.serialization.Model):
"""FlowFeature.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar state:
:vartype state: ~flow.models.FlowFeatureState
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'state': {'key': 'state', 'type': 'FlowFeatureState'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword state:
:paramtype state: ~flow.models.FlowFeatureState
"""
super(FlowFeature, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.state = kwargs.get('state', None)
class FlowFeatureState(msrest.serialization.Model):
"""FlowFeatureState.
:ivar runtime: Possible values include: "Ready", "E2ETest".
:vartype runtime: str or ~flow.models.FlowFeatureStateEnum
:ivar executor: Possible values include: "Ready", "E2ETest".
:vartype executor: str or ~flow.models.FlowFeatureStateEnum
:ivar pfs: Possible values include: "Ready", "E2ETest".
:vartype pfs: str or ~flow.models.FlowFeatureStateEnum
"""
_attribute_map = {
'runtime': {'key': 'Runtime', 'type': 'str'},
'executor': {'key': 'Executor', 'type': 'str'},
'pfs': {'key': 'PFS', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime: Possible values include: "Ready", "E2ETest".
:paramtype runtime: str or ~flow.models.FlowFeatureStateEnum
:keyword executor: Possible values include: "Ready", "E2ETest".
:paramtype executor: str or ~flow.models.FlowFeatureStateEnum
:keyword pfs: Possible values include: "Ready", "E2ETest".
:paramtype pfs: str or ~flow.models.FlowFeatureStateEnum
"""
super(FlowFeatureState, self).__init__(**kwargs)
self.runtime = kwargs.get('runtime', None)
self.executor = kwargs.get('executor', None)
self.pfs = kwargs.get('pfs', None)
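# Note: unlike most models in this module, this one serializes with
# capitalized wire keys ('Runtime', 'Executor', 'PFS'), per _attribute_map:
#
#     state = FlowFeatureState(runtime="Ready", executor="E2ETest")
#     state.serialize()  # -> {'Runtime': 'Ready', 'Executor': 'E2ETest'}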
class FlowGraph(msrest.serialization.Model):
"""FlowGraph.
:ivar nodes:
:vartype nodes: list[~flow.models.Node]
:ivar tools:
:vartype tools: list[~flow.models.Tool]
:ivar codes: This is a dictionary.
:vartype codes: dict[str, str]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
"""
_attribute_map = {
'nodes': {'key': 'nodes', 'type': '[Node]'},
'tools': {'key': 'tools', 'type': '[Tool]'},
'codes': {'key': 'codes', 'type': '{str}'},
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword nodes:
:paramtype nodes: list[~flow.models.Node]
:keyword tools:
:paramtype tools: list[~flow.models.Tool]
:keyword codes: This is a dictionary.
:paramtype codes: dict[str, str]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
"""
super(FlowGraph, self).__init__(**kwargs)
self.nodes = kwargs.get('nodes', None)
self.tools = kwargs.get('tools', None)
self.codes = kwargs.get('codes', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
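# Usage sketch: typed input/output definitions wire the graph boundary; the
# binding expression and node name are illustrative (FlowInputDefinition and
# FlowOutputDefinition are defined later in this module):
#
#     graph = FlowGraph(
#         nodes=[],  # Node models for each step would go here
#         inputs={"question": FlowInputDefinition(name="question", type="string")},
#         outputs={"answer": FlowOutputDefinition(
#             name="answer", type="string", reference="${llm_node.output}")},
#     )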
class FlowGraphAnnotationNode(msrest.serialization.Model):
"""FlowGraphAnnotationNode.
:ivar id:
:vartype id: str
:ivar content:
:vartype content: str
:ivar mentioned_node_names:
:vartype mentioned_node_names: list[str]
:ivar structured_content:
:vartype structured_content: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'content': {'key': 'content', 'type': 'str'},
'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
'structured_content': {'key': 'structuredContent', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword content:
:paramtype content: str
:keyword mentioned_node_names:
:paramtype mentioned_node_names: list[str]
:keyword structured_content:
:paramtype structured_content: str
"""
super(FlowGraphAnnotationNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.content = kwargs.get('content', None)
self.mentioned_node_names = kwargs.get('mentioned_node_names', None)
self.structured_content = kwargs.get('structured_content', None)
class FlowGraphLayout(msrest.serialization.Model):
"""FlowGraphLayout.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
:ivar orientation: Possible values include: "Horizontal", "Vertical".
:vartype orientation: str or ~flow.models.Orientation
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{FlowNodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[FlowGraphAnnotationNode]'},
'orientation': {'key': 'orientation', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
:keyword orientation: Possible values include: "Horizontal", "Vertical".
:paramtype orientation: str or ~flow.models.Orientation
"""
super(FlowGraphLayout, self).__init__(**kwargs)
self.node_layouts = kwargs.get('node_layouts', None)
self.extended_data = kwargs.get('extended_data', None)
self.annotation_nodes = kwargs.get('annotation_nodes', None)
self.orientation = kwargs.get('orientation', None)
class FlowGraphReference(msrest.serialization.Model):
"""FlowGraphReference.
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar reference_resource_id:
:vartype reference_resource_id: str
"""
_attribute_map = {
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'reference_resource_id': {'key': 'referenceResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword reference_resource_id:
:paramtype reference_resource_id: str
"""
super(FlowGraphReference, self).__init__(**kwargs)
self.flow_graph = kwargs.get('flow_graph', None)
self.reference_resource_id = kwargs.get('reference_resource_id', None)
class FlowIndexEntity(msrest.serialization.Model):
"""FlowIndexEntity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar schema_id:
:vartype schema_id: str
:ivar entity_id:
:vartype entity_id: str
:ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:vartype kind: str or ~flow.models.EntityKind
:ivar annotations:
:vartype annotations: ~flow.models.FlowAnnotations
:ivar properties:
:vartype properties: ~flow.models.FlowProperties
:ivar internal: Any object.
:vartype internal: any
:ivar update_sequence:
:vartype update_sequence: long
:ivar type:
:vartype type: str
:ivar version:
:vartype version: str
:ivar entity_container_id:
:vartype entity_container_id: str
:ivar entity_object_id:
:vartype entity_object_id: str
:ivar resource_type:
:vartype resource_type: str
:ivar relationships:
:vartype relationships: list[~flow.models.Relationship]
:ivar asset_id:
:vartype asset_id: str
"""
_validation = {
'version': {'readonly': True},
'entity_container_id': {'readonly': True},
'entity_object_id': {'readonly': True},
'resource_type': {'readonly': True},
}
_attribute_map = {
'schema_id': {'key': 'schemaId', 'type': 'str'},
'entity_id': {'key': 'entityId', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'annotations': {'key': 'annotations', 'type': 'FlowAnnotations'},
'properties': {'key': 'properties', 'type': 'FlowProperties'},
'internal': {'key': 'internal', 'type': 'object'},
'update_sequence': {'key': 'updateSequence', 'type': 'long'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'relationships': {'key': 'relationships', 'type': '[Relationship]'},
'asset_id': {'key': 'assetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword schema_id:
:paramtype schema_id: str
:keyword entity_id:
:paramtype entity_id: str
:keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:paramtype kind: str or ~flow.models.EntityKind
:keyword annotations:
:paramtype annotations: ~flow.models.FlowAnnotations
:keyword properties:
:paramtype properties: ~flow.models.FlowProperties
:keyword internal: Any object.
:paramtype internal: any
:keyword update_sequence:
:paramtype update_sequence: long
:keyword type:
:paramtype type: str
:keyword relationships:
:paramtype relationships: list[~flow.models.Relationship]
:keyword asset_id:
:paramtype asset_id: str
"""
super(FlowIndexEntity, self).__init__(**kwargs)
self.schema_id = kwargs.get('schema_id', None)
self.entity_id = kwargs.get('entity_id', None)
self.kind = kwargs.get('kind', None)
self.annotations = kwargs.get('annotations', None)
self.properties = kwargs.get('properties', None)
self.internal = kwargs.get('internal', None)
self.update_sequence = kwargs.get('update_sequence', None)
self.type = kwargs.get('type', None)
self.version = None
self.entity_container_id = None
self.entity_object_id = None
self.resource_type = None
self.relationships = kwargs.get('relationships', None)
self.asset_id = kwargs.get('asset_id', None)
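# Note: the fields marked readonly in _validation (version, entity_container_id,
# entity_object_id, resource_type) are populated by the service only; the
# constructor forces them to None, and msrest's serialize() omits readonly
# members unless keep_readonly=True is passed.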
class FlowInputDefinition(msrest.serialization.Model):
"""FlowInputDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:vartype type: str or ~flow.models.ValueType
:ivar default: The default value for this input; may be any JSON-serializable object.
:vartype default: any
:ivar description:
:vartype description: str
:ivar is_chat_input:
:vartype is_chat_input: bool
:ivar is_chat_history:
:vartype is_chat_history: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default': {'key': 'default', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'is_chat_input': {'key': 'is_chat_input', 'type': 'bool'},
'is_chat_history': {'key': 'is_chat_history', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:paramtype type: str or ~flow.models.ValueType
:keyword default: The default value for this input; may be any JSON-serializable object.
:paramtype default: any
:keyword description:
:paramtype description: str
:keyword is_chat_input:
:paramtype is_chat_input: bool
:keyword is_chat_history:
:paramtype is_chat_history: bool
"""
super(FlowInputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.description = kwargs.get('description', None)
self.is_chat_input = kwargs.get('is_chat_input', None)
self.is_chat_history = kwargs.get('is_chat_history', None)
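# Usage sketch: chat flows typically mark one string input as the chat input
# and one list input as the chat history. Names are illustrative:
#
#     chat_in = FlowInputDefinition(name="question", type="string",
#                                   is_chat_input=True)
#     history = FlowInputDefinition(name="chat_history", type="list",
#                                   is_chat_history=True)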
class FlowNode(msrest.serialization.Model):
"""FlowNode.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:vartype type: str or ~flow.models.ToolType
:ivar source:
:vartype source: ~flow.models.NodeSource
:ivar inputs: Dictionary of :code:`<any>`.
:vartype inputs: dict[str, any]
:ivar use_variants:
:vartype use_variants: bool
:ivar activate:
:vartype activate: ~flow.models.Activate
:ivar comment:
:vartype comment: str
:ivar api:
:vartype api: str
:ivar provider:
:vartype provider: str
:ivar connection:
:vartype connection: str
:ivar module:
:vartype module: str
:ivar aggregation:
:vartype aggregation: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'NodeSource'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'use_variants': {'key': 'use_variants', 'type': 'bool'},
'activate': {'key': 'activate', 'type': 'Activate'},
'comment': {'key': 'comment', 'type': 'str'},
'api': {'key': 'api', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'connection': {'key': 'connection', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
'aggregation': {'key': 'aggregation', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:paramtype type: str or ~flow.models.ToolType
:keyword source:
:paramtype source: ~flow.models.NodeSource
:keyword inputs: Dictionary of :code:`<any>`.
:paramtype inputs: dict[str, any]
:keyword use_variants:
:paramtype use_variants: bool
:keyword activate:
:paramtype activate: ~flow.models.Activate
:keyword comment:
:paramtype comment: str
:keyword api:
:paramtype api: str
:keyword provider:
:paramtype provider: str
:keyword connection:
:paramtype connection: str
:keyword module:
:paramtype module: str
:keyword aggregation:
:paramtype aggregation: bool
"""
super(FlowNode, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.source = kwargs.get('source', None)
self.inputs = kwargs.get('inputs', None)
self.use_variants = kwargs.get('use_variants', None)
self.activate = kwargs.get('activate', None)
self.comment = kwargs.get('comment', None)
self.api = kwargs.get('api', None)
self.provider = kwargs.get('provider', None)
self.connection = kwargs.get('connection', None)
self.module = kwargs.get('module', None)
self.aggregation = kwargs.get('aggregation', None)
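# Usage sketch: a python-tool node binding a flow input into its own inputs.
# The ${inputs.question} expression follows the flow binding convention; the
# node name is illustrative, and source (a NodeSource, defined elsewhere in
# this module) is omitted for brevity:
#
#     node = FlowNode(
#         name="fetch_answer",
#         type="python",
#         inputs={"question": "${inputs.question}"},
#     )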
class FlowNodeLayout(msrest.serialization.Model):
"""FlowNodeLayout.
:ivar x:
:vartype x: float
:ivar y:
:vartype y: float
:ivar width:
:vartype width: float
:ivar height:
:vartype height: float
:ivar index:
:vartype index: int
:ivar extended_data:
:vartype extended_data: str
"""
_attribute_map = {
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
'width': {'key': 'width', 'type': 'float'},
'height': {'key': 'height', 'type': 'float'},
'index': {'key': 'index', 'type': 'int'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword x:
:paramtype x: float
:keyword y:
:paramtype y: float
:keyword width:
:paramtype width: float
:keyword height:
:paramtype height: float
:keyword index:
:paramtype index: int
:keyword extended_data:
:paramtype extended_data: str
"""
super(FlowNodeLayout, self).__init__(**kwargs)
self.x = kwargs.get('x', None)
self.y = kwargs.get('y', None)
self.width = kwargs.get('width', None)
self.height = kwargs.get('height', None)
self.index = kwargs.get('index', None)
self.extended_data = kwargs.get('extended_data', None)
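# Usage sketch: layouts are keyed by node name inside FlowGraphLayout (defined
# above); coordinates are canvas units. Values are illustrative:
#
#     layout = FlowGraphLayout(
#         node_layouts={"fetch_answer": FlowNodeLayout(
#             x=120.0, y=80.0, width=200.0, height=60.0, index=0)},
#         orientation="Vertical",
#     )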
class FlowNodeVariant(msrest.serialization.Model):
"""FlowNodeVariant.
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, ~flow.models.FlowVariantNode]
"""
_attribute_map = {
'default_variant_id': {'key': 'default_variant_id', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{FlowVariantNode}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, ~flow.models.FlowVariantNode]
"""
super(FlowNodeVariant, self).__init__(**kwargs)
self.default_variant_id = kwargs.get('default_variant_id', None)
self.variants = kwargs.get('variants', None)
class FlowOutputDefinition(msrest.serialization.Model):
"""FlowOutputDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:vartype type: str or ~flow.models.ValueType
:ivar description:
:vartype description: str
:ivar reference:
:vartype reference: str
:ivar evaluation_only:
:vartype evaluation_only: bool
:ivar is_chat_output:
:vartype is_chat_output: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'reference': {'key': 'reference', 'type': 'str'},
'evaluation_only': {'key': 'evaluation_only', 'type': 'bool'},
'is_chat_output': {'key': 'is_chat_output', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:paramtype type: str or ~flow.models.ValueType
:keyword description:
:paramtype description: str
:keyword reference:
:paramtype reference: str
:keyword evaluation_only:
:paramtype evaluation_only: bool
:keyword is_chat_output:
:paramtype is_chat_output: bool
"""
super(FlowOutputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.description = kwargs.get('description', None)
self.reference = kwargs.get('reference', None)
self.evaluation_only = kwargs.get('evaluation_only', None)
self.is_chat_output = kwargs.get('is_chat_output', None)
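# Usage sketch: ``reference`` points an output at a node result with the
# ${node_name.output} expression form; is_chat_output marks the reply shown in
# chat experiences. Values are illustrative:
#
#     out = FlowOutputDefinition(name="answer", type="string",
#                                reference="${fetch_answer.output}",
#                                is_chat_output=True)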
class FlowProperties(msrest.serialization.Model):
"""FlowProperties.
:ivar flow_id:
:vartype flow_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar creation_context:
:vartype creation_context: ~flow.models.CreationContext
"""
_attribute_map = {
'flow_id': {'key': 'flowId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_id:
:paramtype flow_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword creation_context:
:paramtype creation_context: ~flow.models.CreationContext
"""
super(FlowProperties, self).__init__(**kwargs)
self.flow_id = kwargs.get('flow_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.creation_context = kwargs.get('creation_context', None)
class FlowRunBasePath(msrest.serialization.Model):
"""FlowRunBasePath.
:ivar output_datastore_name:
:vartype output_datastore_name: str
:ivar base_path:
:vartype base_path: str
"""
_attribute_map = {
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
'base_path': {'key': 'basePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_datastore_name:
:paramtype output_datastore_name: str
:keyword base_path:
:paramtype base_path: str
"""
super(FlowRunBasePath, self).__init__(**kwargs)
self.output_datastore_name = kwargs.get('output_datastore_name', None)
self.base_path = kwargs.get('base_path', None)
class FlowRunInfo(msrest.serialization.Model):
"""FlowRunInfo.
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar output_datastore_name:
:vartype output_datastore_name: str
:ivar child_run_base_path:
:vartype child_run_base_path: str
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
"""
_attribute_map = {
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
'child_run_base_path': {'key': 'childRunBasePath', 'type': 'str'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword output_datastore_name:
:paramtype output_datastore_name: str
:keyword child_run_base_path:
:paramtype child_run_base_path: str
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
"""
super(FlowRunInfo, self).__init__(**kwargs)
self.flow_graph = kwargs.get('flow_graph', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.flow_name = kwargs.get('flow_name', None)
self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.flow_type = kwargs.get('flow_type', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.output_datastore_name = kwargs.get('output_datastore_name', None)
self.child_run_base_path = kwargs.get('child_run_base_path', None)
self.working_directory = kwargs.get('working_directory', None)
self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None)
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.studio_portal_endpoint = kwargs.get('studio_portal_endpoint', None)
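# Illustrative sketch: FlowRunInfo instances normally come back from the
# service deserializer rather than being built by hand. Assuming `run_info`
# (hypothetical name) is such an instance:
#
#   print(run_info.flow_run_id, run_info.flow_run_type)
#   for target, source in (run_info.inputs_mapping or {}).items():
#       print(f"{target} <- {source}")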
class FlowRunResult(msrest.serialization.Model):
"""FlowRunResult.
:ivar flow_runs:
:vartype flow_runs: list[any]
:ivar node_runs:
:vartype node_runs: list[any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar flow_run_logs: Dictionary of :code:`<string>`.
:vartype flow_run_logs: dict[str, str]
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
:ivar variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each variant run
 id to the list of evaluation run ids produced for it.
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
_attribute_map = {
'flow_runs': {'key': 'flow_runs', 'type': '[object]'},
'node_runs': {'key': 'node_runs', 'type': '[object]'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_runs:
:paramtype flow_runs: list[any]
:keyword node_runs:
:paramtype node_runs: list[any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword flow_run_logs: Dictionary of :code:`<string>`.
:paramtype flow_run_logs: dict[str, str]
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
:keyword variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each variant
 run id to the list of evaluation run ids produced for it.
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
super(FlowRunResult, self).__init__(**kwargs)
self.flow_runs = kwargs.get('flow_runs', None)
self.node_runs = kwargs.get('node_runs', None)
self.error_response = kwargs.get('error_response', None)
self.flow_name = kwargs.get('flow_name', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_graph = kwargs.get('flow_graph', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.flow_type = kwargs.get('flow_type', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.flow_run_logs = kwargs.get('flow_run_logs', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.working_directory = kwargs.get('working_directory', None)
self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None)
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.variant_run_to_evaluation_runs_id_mapping = kwargs.get('variant_run_to_evaluation_runs_id_mapping', None)
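# Illustrative sketch: after a submission API returns a FlowRunResult
# (variable name hypothetical), check for a service-side error before
# reading per-run logs.
#
#   if result.error_response is not None:
#       raise RuntimeError(result.error_response)
#   for run_id, log_text in (result.flow_run_logs or {}).items():
#       print(run_id, log_text[:200])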
class FlowRunSettings(msrest.serialization.Model):
"""FlowRunSettings.
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
"PairwiseEval".
:vartype run_mode: str or ~flow.models.FlowRunMode
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar tuning_node_names:
:vartype tuning_node_names: list[str]
:ivar tuning_node_settings: This is a dictionary.
:vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:ivar baseline_variant_id:
:vartype baseline_variant_id: str
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, list[~flow.models.Node]]
:ivar variants_tools:
:vartype variants_tools: list[~flow.models.Tool]
:ivar variants_codes: This is a dictionary.
:vartype variants_codes: dict[str, str]
:ivar node_name:
:vartype node_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar evaluation_flow_run_settings: This is a dictionary.
:vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar bulk_test_flow_id:
:vartype bulk_test_flow_id: str
:ivar bulk_test_flow_run_ids:
:vartype bulk_test_flow_run_ids: list[str]
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar runtime_name:
:vartype runtime_name: str
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
"""
_attribute_map = {
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'run_mode': {'key': 'runMode', 'type': 'str'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{[Node]}'},
'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'},
'variants_codes': {'key': 'variantsCodes', 'type': '{str}'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
"Eval", "PairwiseEval".
:paramtype run_mode: str or ~flow.models.FlowRunMode
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword tuning_node_names:
:paramtype tuning_node_names: list[str]
:keyword tuning_node_settings: This is a dictionary.
:paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:keyword baseline_variant_id:
:paramtype baseline_variant_id: str
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, list[~flow.models.Node]]
:keyword variants_tools:
:paramtype variants_tools: list[~flow.models.Tool]
:keyword variants_codes: This is a dictionary.
:paramtype variants_codes: dict[str, str]
:keyword node_name:
:paramtype node_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword evaluation_flow_run_settings: This is a dictionary.
:paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword bulk_test_flow_id:
:paramtype bulk_test_flow_id: str
:keyword bulk_test_flow_run_ids:
:paramtype bulk_test_flow_run_ids: list[str]
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword runtime_name:
:paramtype runtime_name: str
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
"""
super(FlowRunSettings, self).__init__(**kwargs)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.run_mode = kwargs.get('run_mode', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.tuning_node_names = kwargs.get('tuning_node_names', None)
self.tuning_node_settings = kwargs.get('tuning_node_settings', None)
self.baseline_variant_id = kwargs.get('baseline_variant_id', None)
self.default_variant_id = kwargs.get('default_variant_id', None)
self.variants = kwargs.get('variants', None)
self.variants_tools = kwargs.get('variants_tools', None)
self.variants_codes = kwargs.get('variants_codes', None)
self.node_name = kwargs.get('node_name', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.evaluation_flow_run_settings = kwargs.get('evaluation_flow_run_settings', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.bulk_test_flow_id = kwargs.get('bulk_test_flow_id', None)
self.bulk_test_flow_run_ids = kwargs.get('bulk_test_flow_run_ids', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
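# Illustrative sketch, assuming a bulk-test submission; every value is a
# hypothetical placeholder.
#
#   settings = FlowRunSettings(
#       flow_run_display_name="bulk-test-2024-01-01",
#       run_mode="BulkTest",
#       batch_inputs=[{"question": "What does this flow do?"}],
#       inputs_mapping={"question": "${data.question}"},
#       runtime_name="my-runtime",
#   )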
class FlowRuntimeCapability(msrest.serialization.Model):
"""FlowRuntimeCapability.
:ivar flow_features:
:vartype flow_features: list[~flow.models.FlowFeature]
"""
_attribute_map = {
'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_features:
:paramtype flow_features: list[~flow.models.FlowFeature]
"""
super(FlowRuntimeCapability, self).__init__(**kwargs)
self.flow_features = kwargs.get('flow_features', None)
class FlowRuntimeDto(msrest.serialization.Model):
"""FlowRuntimeDto.
:ivar runtime_name:
:vartype runtime_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar environment:
:vartype environment: str
:ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:vartype status: str or ~flow.models.RuntimeStatusEnum
:ivar status_message:
:vartype status_message: str
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar docker_image:
:vartype docker_image: str
:ivar published_port:
:vartype published_port: int
:ivar target_port:
:vartype target_port: int
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar assigned_to:
:vartype assigned_to: ~flow.models.AssignedUser
:ivar endpoint_url:
:vartype endpoint_url: str
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
"""
_attribute_map = {
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'docker_image': {'key': 'dockerImage', 'type': 'str'},
'published_port': {'key': 'publishedPort', 'type': 'int'},
'target_port': {'key': 'targetPort', 'type': 'int'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime_name:
:paramtype runtime_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword environment:
:paramtype environment: str
:keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:paramtype status: str or ~flow.models.RuntimeStatusEnum
:keyword status_message:
:paramtype status_message: str
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword docker_image:
:paramtype docker_image: str
:keyword published_port:
:paramtype published_port: int
:keyword target_port:
:paramtype target_port: int
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword assigned_to:
:paramtype assigned_to: ~flow.models.AssignedUser
:keyword endpoint_url:
:paramtype endpoint_url: str
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
"""
super(FlowRuntimeDto, self).__init__(**kwargs)
self.runtime_name = kwargs.get('runtime_name', None)
self.runtime_description = kwargs.get('runtime_description', None)
self.runtime_type = kwargs.get('runtime_type', None)
self.environment = kwargs.get('environment', None)
self.status = kwargs.get('status', None)
self.status_message = kwargs.get('status_message', None)
self.error = kwargs.get('error', None)
self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.identity = kwargs.get('identity', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.compute_instance_name = kwargs.get('compute_instance_name', None)
self.docker_image = kwargs.get('docker_image', None)
self.published_port = kwargs.get('published_port', None)
self.target_port = kwargs.get('target_port', None)
self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
self.custom_app_name = kwargs.get('custom_app_name', None)
self.assigned_to = kwargs.get('assigned_to', None)
self.endpoint_url = kwargs.get('endpoint_url', None)
self.created_on = kwargs.get('created_on', None)
self.modified_on = kwargs.get('modified_on', None)
self.owner = kwargs.get('owner', None)
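# Illustrative sketch: checking a runtime's health from a FlowRuntimeDto
# returned by the runtimes API (variable name hypothetical).
#
#   if runtime.status in ("Unavailable", "Failed", "NotExist"):
#       print(runtime.status_message, runtime.error)
#   else:
#       print(f"{runtime.runtime_name} ({runtime.runtime_type}) is usable")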
class FlowSampleDto(msrest.serialization.Model):
"""FlowSampleDto.
:ivar sample_resource_id:
:vartype sample_resource_id: str
:ivar section: Possible values include: "Gallery", "Template".
:vartype section: str or ~flow.models.Section
:ivar index_number:
:vartype index_number: int
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
'section': {'key': 'section', 'type': 'str'},
'index_number': {'key': 'indexNumber', 'type': 'int'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sample_resource_id:
:paramtype sample_resource_id: str
:keyword section: Possible values include: "Gallery", "Template".
:paramtype section: str or ~flow.models.Section
:keyword index_number:
:paramtype index_number: int
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowSampleDto, self).__init__(**kwargs)
self.sample_resource_id = kwargs.get('sample_resource_id', None)
self.section = kwargs.get('section', None)
self.index_number = kwargs.get('index_number', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.details = kwargs.get('details', None)
self.tags = kwargs.get('tags', None)
self.flow = kwargs.get('flow', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
class FlowSessionDto(msrest.serialization.Model):
"""FlowSessionDto.
:ivar session_id:
:vartype session_id: str
:ivar base_image:
:vartype base_image: str
:ivar packages:
:vartype packages: list[str]
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar flow_features:
:vartype flow_features: list[~flow.models.FlowFeature]
:ivar runtime_name:
:vartype runtime_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar environment:
:vartype environment: str
:ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:vartype status: str or ~flow.models.RuntimeStatusEnum
:ivar status_message:
:vartype status_message: str
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar docker_image:
:vartype docker_image: str
:ivar published_port:
:vartype published_port: int
:ivar target_port:
:vartype target_port: int
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar assigned_to:
:vartype assigned_to: ~flow.models.AssignedUser
:ivar endpoint_url:
:vartype endpoint_url: str
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
"""
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'base_image': {'key': 'baseImage', 'type': 'str'},
'packages': {'key': 'packages', 'type': '[str]'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'docker_image': {'key': 'dockerImage', 'type': 'str'},
'published_port': {'key': 'publishedPort', 'type': 'int'},
'target_port': {'key': 'targetPort', 'type': 'int'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword session_id:
:paramtype session_id: str
:keyword base_image:
:paramtype base_image: str
:keyword packages:
:paramtype packages: list[str]
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword flow_features:
:paramtype flow_features: list[~flow.models.FlowFeature]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword environment:
:paramtype environment: str
:keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:paramtype status: str or ~flow.models.RuntimeStatusEnum
:keyword status_message:
:paramtype status_message: str
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword docker_image:
:paramtype docker_image: str
:keyword published_port:
:paramtype published_port: int
:keyword target_port:
:paramtype target_port: int
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword assigned_to:
:paramtype assigned_to: ~flow.models.AssignedUser
:keyword endpoint_url:
:paramtype endpoint_url: str
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
"""
super(FlowSessionDto, self).__init__(**kwargs)
self.session_id = kwargs.get('session_id', None)
self.base_image = kwargs.get('base_image', None)
self.packages = kwargs.get('packages', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.flow_features = kwargs.get('flow_features', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.runtime_description = kwargs.get('runtime_description', None)
self.runtime_type = kwargs.get('runtime_type', None)
self.environment = kwargs.get('environment', None)
self.status = kwargs.get('status', None)
self.status_message = kwargs.get('status_message', None)
self.error = kwargs.get('error', None)
self.from_existing_endpoint = kwargs.get('from_existing_endpoint', None)
self.endpoint_name = kwargs.get('endpoint_name', None)
self.from_existing_deployment = kwargs.get('from_existing_deployment', None)
self.deployment_name = kwargs.get('deployment_name', None)
self.identity = kwargs.get('identity', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_count = kwargs.get('instance_count', None)
self.compute_instance_name = kwargs.get('compute_instance_name', None)
self.docker_image = kwargs.get('docker_image', None)
self.published_port = kwargs.get('published_port', None)
self.target_port = kwargs.get('target_port', None)
self.from_existing_custom_app = kwargs.get('from_existing_custom_app', None)
self.custom_app_name = kwargs.get('custom_app_name', None)
self.assigned_to = kwargs.get('assigned_to', None)
self.endpoint_url = kwargs.get('endpoint_url', None)
self.created_on = kwargs.get('created_on', None)
self.modified_on = kwargs.get('modified_on', None)
self.owner = kwargs.get('owner', None)
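# Illustrative sketch: reading provisioning details from a FlowSessionDto
# (variable name hypothetical).
#
#   print(session.session_id, session.status)
#   print(session.base_image, session.packages or [])
#   if session.max_idle_time_seconds:
#       print(f"session idles out after {session.max_idle_time_seconds}s")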
class FlowSnapshot(msrest.serialization.Model):
"""FlowSnapshot.
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:ivar nodes:
:vartype nodes: list[~flow.models.FlowNode]
:ivar node_variants: This is a dictionary.
:vartype node_variants: dict[str, ~flow.models.FlowNodeVariant]
:ivar environment:
:vartype environment: ~flow.models.FlowEnvironment
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, any]
:ivar language: Possible values include: "Python", "CSharp".
:vartype language: str or ~flow.models.FlowLanguage
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
'nodes': {'key': 'nodes', 'type': '[FlowNode]'},
'node_variants': {'key': 'node_variants', 'type': '{FlowNodeVariant}'},
'environment': {'key': 'environment', 'type': 'FlowEnvironment'},
'environment_variables': {'key': 'environment_variables', 'type': '{object}'},
'language': {'key': 'language', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:keyword nodes:
:paramtype nodes: list[~flow.models.FlowNode]
:keyword node_variants: This is a dictionary.
:paramtype node_variants: dict[str, ~flow.models.FlowNodeVariant]
:keyword environment:
:paramtype environment: ~flow.models.FlowEnvironment
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, any]
:keyword language: Possible values include: "Python", "CSharp".
:paramtype language: str or ~flow.models.FlowLanguage
"""
super(FlowSnapshot, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.nodes = kwargs.get('nodes', None)
self.node_variants = kwargs.get('node_variants', None)
self.environment = kwargs.get('environment', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.language = kwargs.get('language', None)
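# Illustrative sketch: a minimal FlowSnapshot. The environment variable name
# is a hypothetical placeholder; inputs/outputs would normally hold
# FlowInputDefinition/FlowOutputDefinition values keyed by port name.
#
#   snapshot = FlowSnapshot(
#       inputs={},
#       outputs={},
#       nodes=[],
#       environment_variables={"MY_SETTING": "value"},
#       language="Python",
#   )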
class FlowSubmitRunSettings(msrest.serialization.Model):
"""FlowSubmitRunSettings.
:ivar node_inputs: This is a dictionary.
:vartype node_inputs: dict[str, any]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
"PairwiseEval".
:vartype run_mode: str or ~flow.models.FlowRunMode
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar tuning_node_names:
:vartype tuning_node_names: list[str]
:ivar tuning_node_settings: This is a dictionary.
:vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:ivar baseline_variant_id:
:vartype baseline_variant_id: str
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, list[~flow.models.Node]]
:ivar variants_tools:
:vartype variants_tools: list[~flow.models.Tool]
:ivar variants_codes: This is a dictionary.
:vartype variants_codes: dict[str, str]
:ivar node_name:
:vartype node_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar evaluation_flow_run_settings: This is a dictionary.
:vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar bulk_test_flow_id:
:vartype bulk_test_flow_id: str
:ivar bulk_test_flow_run_ids:
:vartype bulk_test_flow_run_ids: list[str]
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar runtime_name:
:vartype runtime_name: str
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
"""
_attribute_map = {
'node_inputs': {'key': 'nodeInputs', 'type': '{object}'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'run_mode': {'key': 'runMode', 'type': 'str'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{[Node]}'},
'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'},
'variants_codes': {'key': 'variantsCodes', 'type': '{str}'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_inputs: This is a dictionary.
:paramtype node_inputs: dict[str, any]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
"Eval", "PairwiseEval".
:paramtype run_mode: str or ~flow.models.FlowRunMode
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword tuning_node_names:
:paramtype tuning_node_names: list[str]
:keyword tuning_node_settings: This is a dictionary.
:paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:keyword baseline_variant_id:
:paramtype baseline_variant_id: str
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, list[~flow.models.Node]]
:keyword variants_tools:
:paramtype variants_tools: list[~flow.models.Tool]
:keyword variants_codes: This is a dictionary.
:paramtype variants_codes: dict[str, str]
:keyword node_name:
:paramtype node_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword evaluation_flow_run_settings: This is a dictionary.
:paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword bulk_test_flow_id:
:paramtype bulk_test_flow_id: str
:keyword bulk_test_flow_run_ids:
:paramtype bulk_test_flow_run_ids: list[str]
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword runtime_name:
:paramtype runtime_name: str
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
"""
super(FlowSubmitRunSettings, self).__init__(**kwargs)
self.node_inputs = kwargs.get('node_inputs', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.run_mode = kwargs.get('run_mode', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.tuning_node_names = kwargs.get('tuning_node_names', None)
self.tuning_node_settings = kwargs.get('tuning_node_settings', None)
self.baseline_variant_id = kwargs.get('baseline_variant_id', None)
self.default_variant_id = kwargs.get('default_variant_id', None)
self.variants = kwargs.get('variants', None)
self.variants_tools = kwargs.get('variants_tools', None)
self.variants_codes = kwargs.get('variants_codes', None)
self.node_name = kwargs.get('node_name', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.evaluation_flow_run_settings = kwargs.get('evaluation_flow_run_settings', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.data_inputs = kwargs.get('data_inputs', None)
self.bulk_test_flow_id = kwargs.get('bulk_test_flow_id', None)
self.bulk_test_flow_run_ids = kwargs.get('bulk_test_flow_run_ids', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.flow_run_output_directory = kwargs.get('flow_run_output_directory', None)
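# Illustrative sketch, assuming a single-node debug run; names and values
# are hypothetical placeholders.
#
#   submit_settings = FlowSubmitRunSettings(
#       run_mode="SingleNode",
#       node_name="summarize",
#       node_inputs={"text": "hello world"},
#       runtime_name="my-runtime",
#   )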
class FlowTestInfo(msrest.serialization.Model):
"""FlowTestInfo.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_test_storage_setting:
:vartype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_test_storage_setting': {'key': 'flowTestStorageSetting', 'type': 'FlowTestStorageSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_test_storage_setting:
:paramtype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
"""
super(FlowTestInfo, self).__init__(**kwargs)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_test_storage_setting = kwargs.get('flow_test_storage_setting', None)
class FlowTestStorageSetting(msrest.serialization.Model):
"""FlowTestStorageSetting.
:ivar storage_account_name:
:vartype storage_account_name: str
:ivar blob_container_name:
:vartype blob_container_name: str
:ivar flow_artifacts_root_path:
:vartype flow_artifacts_root_path: str
:ivar output_datastore_name:
:vartype output_datastore_name: str
"""
_attribute_map = {
'storage_account_name': {'key': 'storageAccountName', 'type': 'str'},
'blob_container_name': {'key': 'blobContainerName', 'type': 'str'},
'flow_artifacts_root_path': {'key': 'flowArtifactsRootPath', 'type': 'str'},
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_account_name:
:paramtype storage_account_name: str
:keyword blob_container_name:
:paramtype blob_container_name: str
:keyword flow_artifacts_root_path:
:paramtype flow_artifacts_root_path: str
:keyword output_datastore_name:
:paramtype output_datastore_name: str
"""
super(FlowTestStorageSetting, self).__init__(**kwargs)
self.storage_account_name = kwargs.get('storage_account_name', None)
self.blob_container_name = kwargs.get('blob_container_name', None)
self.flow_artifacts_root_path = kwargs.get('flow_artifacts_root_path', None)
self.output_datastore_name = kwargs.get('output_datastore_name', None)
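# Illustrative sketch: pointing flow-test artifacts at a specific blob
# location (all values hypothetical).
#
#   storage = FlowTestStorageSetting(
#       storage_account_name="mystorageaccount",
#       blob_container_name="flow-artifacts",
#       flow_artifacts_root_path="promptflow/sessions/abc123",
#       output_datastore_name="workspaceblobstore",
#   )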
class FlowToolsDto(msrest.serialization.Model):
"""FlowToolsDto.
:ivar package: This is a dictionary.
:vartype package: dict[str, ~flow.models.Tool]
:ivar code: This is a dictionary.
:vartype code: dict[str, ~flow.models.Tool]
:ivar errors: This is a dictionary.
:vartype errors: dict[str, ~flow.models.ErrorResponse]
"""
_attribute_map = {
'package': {'key': 'package', 'type': '{Tool}'},
'code': {'key': 'code', 'type': '{Tool}'},
'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword package: This is a dictionary.
:paramtype package: dict[str, ~flow.models.Tool]
:keyword code: This is a dictionary.
:paramtype code: dict[str, ~flow.models.Tool]
:keyword errors: This is a dictionary.
:paramtype errors: dict[str, ~flow.models.ErrorResponse]
"""
super(FlowToolsDto, self).__init__(**kwargs)
self.package = kwargs.get('package', None)
self.code = kwargs.get('code', None)
self.errors = kwargs.get('errors', None)
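# Illustrative sketch: merging package and code tools from a FlowToolsDto
# (variable name hypothetical) while surfacing per-tool load errors.
#
#   tools = {**(dto.package or {}), **(dto.code or {})}
#   for name, err in (dto.errors or {}).items():
#       print(f"tool {name} failed to load: {err}")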
class FlowToolSettingParameter(msrest.serialization.Model):
"""FlowToolSettingParameter.
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar default:
:vartype default: str
:ivar advanced:
:vartype advanced: bool
:ivar enum:
:vartype enum: list[any]
:ivar model_list:
:vartype model_list: list[str]
:ivar text_box_size:
:vartype text_box_size: int
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar allow_manual_entry:
:vartype allow_manual_entry: bool
"""
_attribute_map = {
'type': {'key': 'type', 'type': '[str]'},
'default': {'key': 'default', 'type': 'str'},
'advanced': {'key': 'advanced', 'type': 'bool'},
'enum': {'key': 'enum', 'type': '[object]'},
'model_list': {'key': 'model_list', 'type': '[str]'},
'text_box_size': {'key': 'text_box_size', 'type': 'int'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword default:
:paramtype default: str
:keyword advanced:
:paramtype advanced: bool
:keyword enum:
:paramtype enum: list[any]
:keyword model_list:
:paramtype model_list: list[str]
:keyword text_box_size:
:paramtype text_box_size: int
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword allow_manual_entry:
:paramtype allow_manual_entry: bool
"""
super(FlowToolSettingParameter, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.advanced = kwargs.get('advanced', None)
self.enum = kwargs.get('enum', None)
self.model_list = kwargs.get('model_list', None)
self.text_box_size = kwargs.get('text_box_size', None)
self.capabilities = kwargs.get('capabilities', None)
self.allow_manual_entry = kwargs.get('allow_manual_entry', None)
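# Illustrative sketch: describing a tool setting that accepts a model name.
# The "string" value type and the model names are assumptions made for the
# example, not values taken from the service contract.
#
#   param = FlowToolSettingParameter(
#       type=["string"],
#       default="gpt-35-turbo",
#       model_list=["gpt-35-turbo", "gpt-4"],
#       allow_manual_entry=True,
#   )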
class FlowVariantNode(msrest.serialization.Model):
"""FlowVariantNode.
:ivar node:
:vartype node: ~flow.models.FlowNode
:ivar description:
:vartype description: str
"""
_attribute_map = {
'node': {'key': 'node', 'type': 'FlowNode'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node:
:paramtype node: ~flow.models.FlowNode
:keyword description:
:paramtype description: str
"""
super(FlowVariantNode, self).__init__(**kwargs)
self.node = kwargs.get('node', None)
self.description = kwargs.get('description', None)
class ForecastHorizon(msrest.serialization.Model):
"""ForecastHorizon.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.ForecastHorizonMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.ForecastHorizonMode
:keyword value:
:paramtype value: int
"""
super(ForecastHorizon, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
class ForecastingSettings(msrest.serialization.Model):
"""ForecastingSettings.
:ivar country_or_region_for_holidays:
:vartype country_or_region_for_holidays: str
:ivar time_column_name:
:vartype time_column_name: str
:ivar target_lags:
:vartype target_lags: ~flow.models.TargetLags
:ivar target_rolling_window_size:
:vartype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
:ivar forecast_horizon:
:vartype forecast_horizon: ~flow.models.ForecastHorizon
:ivar time_series_id_column_names:
:vartype time_series_id_column_names: list[str]
:ivar frequency:
:vartype frequency: str
:ivar feature_lags:
:vartype feature_lags: str
:ivar seasonality:
:vartype seasonality: ~flow.models.Seasonality
:ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:vartype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
:ivar use_stl: Possible values include: "Season", "SeasonTrend".
:vartype use_stl: str or ~flow.models.UseStl
:ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:vartype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
:ivar cv_step_size:
:vartype cv_step_size: int
:ivar features_unknown_at_forecast_time:
:vartype features_unknown_at_forecast_time: list[str]
"""
_attribute_map = {
'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
'target_lags': {'key': 'targetLags', 'type': 'TargetLags'},
'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'},
'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'},
'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
'frequency': {'key': 'frequency', 'type': 'str'},
'feature_lags': {'key': 'featureLags', 'type': 'str'},
'seasonality': {'key': 'seasonality', 'type': 'Seasonality'},
'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
'use_stl': {'key': 'useStl', 'type': 'str'},
'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword country_or_region_for_holidays:
:paramtype country_or_region_for_holidays: str
:keyword time_column_name:
:paramtype time_column_name: str
:keyword target_lags:
:paramtype target_lags: ~flow.models.TargetLags
:keyword target_rolling_window_size:
:paramtype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
:keyword forecast_horizon:
:paramtype forecast_horizon: ~flow.models.ForecastHorizon
:keyword time_series_id_column_names:
:paramtype time_series_id_column_names: list[str]
:keyword frequency:
:paramtype frequency: str
:keyword feature_lags:
:paramtype feature_lags: str
:keyword seasonality:
:paramtype seasonality: ~flow.models.Seasonality
:keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:paramtype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
:keyword use_stl: Possible values include: "Season", "SeasonTrend".
:paramtype use_stl: str or ~flow.models.UseStl
:keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:paramtype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
:keyword cv_step_size:
:paramtype cv_step_size: int
:keyword features_unknown_at_forecast_time:
:paramtype features_unknown_at_forecast_time: list[str]
"""
super(ForecastingSettings, self).__init__(**kwargs)
self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None)
self.time_column_name = kwargs.get('time_column_name', None)
self.target_lags = kwargs.get('target_lags', None)
self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None)
self.forecast_horizon = kwargs.get('forecast_horizon', None)
self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None)
self.frequency = kwargs.get('frequency', None)
self.feature_lags = kwargs.get('feature_lags', None)
self.seasonality = kwargs.get('seasonality', None)
self.short_series_handling_config = kwargs.get('short_series_handling_config', None)
self.use_stl = kwargs.get('use_stl', None)
self.target_aggregate_function = kwargs.get('target_aggregate_function', None)
self.cv_step_size = kwargs.get('cv_step_size', None)
self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None)
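
# Usage sketch (illustrative only, not generated code): ForecastingSettings is a
# plain kwargs container, so a request payload can be built directly. The column
# and feature names below are hypothetical.
def _example_forecasting_settings():  # pragma: no cover - usage sketch
    return ForecastingSettings(
        time_column_name="date",
        frequency="W",
        cv_step_size=3,
        features_unknown_at_forecast_time=["promo_flag"],
    )
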
class GeneralSettings(msrest.serialization.Model):
"""GeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:vartype primary_metric: str or ~flow.models.PrimaryMetrics
:ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:vartype task_type: str or ~flow.models.TaskType
:ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:vartype log_verbosity: str or ~flow.models.LogVerbosity
"""
_attribute_map = {
'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
'task_type': {'key': 'taskType', 'type': 'str'},
'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:paramtype primary_metric: str or ~flow.models.PrimaryMetrics
:keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:paramtype task_type: str or ~flow.models.TaskType
:keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:paramtype log_verbosity: str or ~flow.models.LogVerbosity
"""
super(GeneralSettings, self).__init__(**kwargs)
self.primary_metric = kwargs.get('primary_metric', None)
self.task_type = kwargs.get('task_type', None)
self.log_verbosity = kwargs.get('log_verbosity', None)
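
# Usage sketch (illustrative only): the "Possible values include" fields accept
# either the plain string or the matching ~flow.models enum member.
def _example_general_settings():  # pragma: no cover - usage sketch
    return GeneralSettings(
        primary_metric="Accuracy",   # or the matching PrimaryMetrics member
        task_type="Classification",
        log_verbosity="Info",
    )
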
class GeneratePipelineComponentRequest(msrest.serialization.Model):
"""GeneratePipelineComponentRequest.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar category:
:vartype category: str
:ivar version:
:vartype version: str
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar registry_name:
:vartype registry_name: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword category:
:paramtype category: str
:keyword version:
:paramtype version: str
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword registry_name:
:paramtype registry_name: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(GeneratePipelineComponentRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.module_scope = kwargs.get('module_scope', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.category = kwargs.get('category', None)
self.version = kwargs.get('version', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.registry_name = kwargs.get('registry_name', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
class GenerateToolMetaRequest(msrest.serialization.Model):
"""GenerateToolMetaRequest.
:ivar tools: This is a dictionary.
:vartype tools: dict[str, ~flow.models.ToolSourceMeta]
:ivar working_dir:
:vartype working_dir: str
"""
_attribute_map = {
'tools': {'key': 'tools', 'type': '{ToolSourceMeta}'},
'working_dir': {'key': 'working_dir', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tools: This is a dictionary.
:paramtype tools: dict[str, ~flow.models.ToolSourceMeta]
:keyword working_dir:
:paramtype working_dir: str
"""
super(GenerateToolMetaRequest, self).__init__(**kwargs)
self.tools = kwargs.get('tools', None)
self.working_dir = kwargs.get('working_dir', None)
class GetDynamicListRequest(msrest.serialization.Model):
"""GetDynamicListRequest.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs: This is a dictionary.
:vartype func_kwargs: dict[str, any]
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs: This is a dictionary.
:paramtype func_kwargs: dict[str, any]
"""
super(GetDynamicListRequest, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
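
# Usage sketch (illustrative only): GetDynamicListRequest mirrors a python call
# site - func_path addresses the function and func_kwargs carries its keyword
# arguments. The module path and arguments below are hypothetical.
def _example_get_dynamic_list_request():  # pragma: no cover - usage sketch
    return GetDynamicListRequest(
        func_path="my_package.tools.list_indexes",
        func_kwargs={"top": 10},
    )
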
class GetRunDataResultDto(msrest.serialization.Model):
"""GetRunDataResultDto.
:ivar run_metadata:
:vartype run_metadata: ~flow.models.RunDto
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar job_specification: Anything.
:vartype job_specification: any
:ivar system_settings: Dictionary of :code:`<string>`.
:vartype system_settings: dict[str, str]
"""
_attribute_map = {
'run_metadata': {'key': 'runMetadata', 'type': 'RunDto'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'job_specification': {'key': 'jobSpecification', 'type': 'object'},
'system_settings': {'key': 'systemSettings', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_metadata:
:paramtype run_metadata: ~flow.models.RunDto
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword job_specification: Anything.
:paramtype job_specification: any
:keyword system_settings: Dictionary of :code:`<string>`.
:paramtype system_settings: dict[str, str]
"""
super(GetRunDataResultDto, self).__init__(**kwargs)
self.run_metadata = kwargs.get('run_metadata', None)
self.run_definition = kwargs.get('run_definition', None)
self.job_specification = kwargs.get('job_specification', None)
self.system_settings = kwargs.get('system_settings', None)
class GetTrainingSessionDto(msrest.serialization.Model):
"""GetTrainingSessionDto.
:ivar properties:
:vartype properties: ~flow.models.SessionProperties
:ivar compute:
:vartype compute: ~flow.models.ComputeContract
"""
_attribute_map = {
'properties': {'key': 'properties', 'type': 'SessionProperties'},
'compute': {'key': 'compute', 'type': 'ComputeContract'},
}
def __init__(
self,
**kwargs
):
"""
:keyword properties:
:paramtype properties: ~flow.models.SessionProperties
:keyword compute:
:paramtype compute: ~flow.models.ComputeContract
"""
super(GetTrainingSessionDto, self).__init__(**kwargs)
self.properties = kwargs.get('properties', None)
self.compute = kwargs.get('compute', None)
class GlobalJobDispatcherConfiguration(msrest.serialization.Model):
"""GlobalJobDispatcherConfiguration.
:ivar vm_size:
:vartype vm_size: list[str]
:ivar compute_type: Possible values include: "AmlCompute", "AmlK8s".
:vartype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
:ivar region:
:vartype region: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar redispatch_allowed:
:vartype redispatch_allowed: bool
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar vc_list:
:vartype vc_list: list[str]
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar vc_block_list:
:vartype vc_block_list: list[str]
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
"""
_attribute_map = {
'vm_size': {'key': 'vmSize', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'redispatch_allowed': {'key': 'redispatchAllowed', 'type': 'bool'},
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'vc_list': {'key': 'vcList', 'type': '[str]'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword vm_size:
:paramtype vm_size: list[str]
:keyword compute_type: Possible values include: "AmlCompute", "AmlK8s".
:paramtype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
:keyword region:
:paramtype region: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword redispatch_allowed:
:paramtype redispatch_allowed: bool
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword vc_list:
:paramtype vc_list: list[str]
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
"""
super(GlobalJobDispatcherConfiguration, self).__init__(**kwargs)
self.vm_size = kwargs.get('vm_size', None)
self.compute_type = kwargs.get('compute_type', None)
self.region = kwargs.get('region', None)
self.my_resource_only = kwargs.get('my_resource_only', None)
self.redispatch_allowed = kwargs.get('redispatch_allowed', None)
self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
self.vc_list = kwargs.get('vc_list', None)
self.plan_id = kwargs.get('plan_id', None)
self.plan_region_id = kwargs.get('plan_region_id', None)
self.vc_block_list = kwargs.get('vc_block_list', None)
self.cluster_block_list = kwargs.get('cluster_block_list', None)
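
# Usage sketch (illustrative only): a dispatcher configuration constraining a
# job to a compute type, VM size and region. All values below are hypothetical.
def _example_global_job_dispatcher():  # pragma: no cover - usage sketch
    return GlobalJobDispatcherConfiguration(
        compute_type="AmlCompute",
        vm_size=["STANDARD_D2_V2"],
        region=["eastus"],
        low_priority_vm_tolerant=True,
    )
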
class GlobsOptions(msrest.serialization.Model):
"""GlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(GlobsOptions, self).__init__(**kwargs)
self.glob_patterns = kwargs.get('glob_patterns', None)
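
# Serialization sketch (illustrative only): the _attribute_map on these models
# is what msrest uses to rename python attribute names to the service's
# camelCase JSON keys.
def _example_serialize_globs_options():  # pragma: no cover - usage sketch
    from msrest import Serializer
    serializer = Serializer({"GlobsOptions": GlobsOptions})
    body = serializer.body(GlobsOptions(glob_patterns=["**/*.jsonl"]), "GlobsOptions")
    # body == {"globPatterns": ["**/*.jsonl"]}
    return body
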
class GraphAnnotationNode(msrest.serialization.Model):
"""GraphAnnotationNode.
:ivar id:
:vartype id: str
:ivar content:
:vartype content: str
:ivar mentioned_node_names:
:vartype mentioned_node_names: list[str]
:ivar structured_content:
:vartype structured_content: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'content': {'key': 'content', 'type': 'str'},
'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
'structured_content': {'key': 'structuredContent', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword content:
:paramtype content: str
:keyword mentioned_node_names:
:paramtype mentioned_node_names: list[str]
:keyword structured_content:
:paramtype structured_content: str
"""
super(GraphAnnotationNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.content = kwargs.get('content', None)
self.mentioned_node_names = kwargs.get('mentioned_node_names', None)
self.structured_content = kwargs.get('structured_content', None)
class GraphControlNode(msrest.serialization.Model):
"""GraphControlNode.
:ivar id:
:vartype id: str
:ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
value is None.
:vartype control_type: str
:ivar control_parameter:
:vartype control_parameter: ~flow.models.ParameterAssignment
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'control_type': {'key': 'controlType', 'type': 'str'},
'control_parameter': {'key': 'controlParameter', 'type': 'ParameterAssignment'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword control_type: The only acceptable values to pass in are None and "IfElse". The
default value is None.
:paramtype control_type: str
:keyword control_parameter:
:paramtype control_parameter: ~flow.models.ParameterAssignment
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphControlNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.control_type = kwargs.get('control_type', None)
self.control_parameter = kwargs.get('control_parameter', None)
self.run_attribution = kwargs.get('run_attribution', None)
class GraphControlReferenceNode(msrest.serialization.Model):
"""GraphControlReferenceNode.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar comment:
:vartype comment: str
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.ControlFlowType
:ivar reference_node_id:
:vartype reference_node_id: str
:ivar do_while_control_flow_info:
:vartype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
:ivar parallel_for_control_flow_info:
:vartype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'DoWhileControlFlowInfo'},
'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'ParallelForControlFlowInfo'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword comment:
:paramtype comment: str
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.ControlFlowType
:keyword reference_node_id:
:paramtype reference_node_id: str
:keyword do_while_control_flow_info:
:paramtype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
:keyword parallel_for_control_flow_info:
:paramtype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphControlReferenceNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.comment = kwargs.get('comment', None)
self.control_flow_type = kwargs.get('control_flow_type', None)
self.reference_node_id = kwargs.get('reference_node_id', None)
self.do_while_control_flow_info = kwargs.get('do_while_control_flow_info', None)
self.parallel_for_control_flow_info = kwargs.get('parallel_for_control_flow_info', None)
self.run_attribution = kwargs.get('run_attribution', None)
class GraphDatasetNode(msrest.serialization.Model):
"""GraphDatasetNode.
:ivar id:
:vartype id: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar data_path_parameter_name:
:vartype data_path_parameter_name: str
:ivar data_set_definition:
:vartype data_set_definition: ~flow.models.DataSetDefinition
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
'data_set_definition': {'key': 'dataSetDefinition', 'type': 'DataSetDefinition'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword data_path_parameter_name:
:paramtype data_path_parameter_name: str
:keyword data_set_definition:
:paramtype data_set_definition: ~flow.models.DataSetDefinition
"""
super(GraphDatasetNode, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.dataset_id = kwargs.get('dataset_id', None)
self.data_path_parameter_name = kwargs.get('data_path_parameter_name', None)
self.data_set_definition = kwargs.get('data_set_definition', None)
class GraphDraftEntity(msrest.serialization.Model):
"""GraphDraftEntity.
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.GraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.GraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.GraphEdge]
:ivar entity_interface:
:vartype entity_interface: ~flow.models.EntityInterface
:ivar graph_layout:
:vartype graph_layout: ~flow.models.GraphLayout
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar extended_properties: This is a dictionary.
:vartype extended_properties: dict[str, str]
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
'edges': {'key': 'edges', 'type': '[GraphEdge]'},
'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.GraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.GraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.GraphEdge]
:keyword entity_interface:
:paramtype entity_interface: ~flow.models.EntityInterface
:keyword graph_layout:
:paramtype graph_layout: ~flow.models.GraphLayout
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword extended_properties: This is a dictionary.
:paramtype extended_properties: dict[str, str]
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(GraphDraftEntity, self).__init__(**kwargs)
self.module_nodes = kwargs.get('module_nodes', None)
self.dataset_nodes = kwargs.get('dataset_nodes', None)
self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
self.control_nodes = kwargs.get('control_nodes', None)
self.edges = kwargs.get('edges', None)
self.entity_interface = kwargs.get('entity_interface', None)
self.graph_layout = kwargs.get('graph_layout', None)
self.created_by = kwargs.get('created_by', None)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.extended_properties = kwargs.get('extended_properties', None)
self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
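
# Usage sketch (illustrative only): a minimal graph draft with a single module
# node and no edges. The node id is a placeholder and "<module-guid>" stands in
# for a real module identifier.
def _example_graph_draft():  # pragma: no cover - usage sketch
    return GraphDraftEntity(
        module_nodes=[GraphModuleNode(id="node-1", module_id="<module-guid>")],
        edges=[],
    )
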
class GraphEdge(msrest.serialization.Model):
"""GraphEdge.
:ivar source_output_port:
:vartype source_output_port: ~flow.models.PortInfo
:ivar destination_input_port:
:vartype destination_input_port: ~flow.models.PortInfo
"""
_attribute_map = {
'source_output_port': {'key': 'sourceOutputPort', 'type': 'PortInfo'},
'destination_input_port': {'key': 'destinationInputPort', 'type': 'PortInfo'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_output_port:
:paramtype source_output_port: ~flow.models.PortInfo
:keyword destination_input_port:
:paramtype destination_input_port: ~flow.models.PortInfo
"""
super(GraphEdge, self).__init__(**kwargs)
self.source_output_port = kwargs.get('source_output_port', None)
self.destination_input_port = kwargs.get('destination_input_port', None)
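
# Usage sketch (illustrative only): an edge joins an output PortInfo to an
# input PortInfo. PortInfo is assumed here to accept node_id/port_name kwargs;
# the node ids and port names are placeholders.
def _example_graph_edge():  # pragma: no cover - usage sketch
    return GraphEdge(
        source_output_port=PortInfo(node_id="node-1", port_name="output"),
        destination_input_port=PortInfo(node_id="node-2", port_name="input"),
    )
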
class GraphLayout(msrest.serialization.Model):
"""GraphLayout.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.NodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(GraphLayout, self).__init__(**kwargs)
self.node_layouts = kwargs.get('node_layouts', None)
self.extended_data = kwargs.get('extended_data', None)
self.annotation_nodes = kwargs.get('annotation_nodes', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
class GraphLayoutCreationInfo(msrest.serialization.Model):
"""GraphLayoutCreationInfo.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.NodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
"""
super(GraphLayoutCreationInfo, self).__init__(**kwargs)
self.node_layouts = kwargs.get('node_layouts', None)
self.extended_data = kwargs.get('extended_data', None)
self.annotation_nodes = kwargs.get('annotation_nodes', None)
class GraphModuleNode(msrest.serialization.Model):
"""GraphModuleNode.
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.ModuleType
:ivar runconfig:
:vartype runconfig: str
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.ParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.OutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.InputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.ControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.ExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'module_type': {'key': 'moduleType', 'type': 'str'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.ModuleType
:keyword runconfig:
:paramtype runconfig: str
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.ParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.OutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.InputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.ControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.ExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphModuleNode, self).__init__(**kwargs)
self.module_type = kwargs.get('module_type', None)
self.runconfig = kwargs.get('runconfig', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
class GraphModuleNodeRunSetting(msrest.serialization.Model):
"""GraphModuleNodeRunSetting.
:ivar node_id:
:vartype node_id: str
:ivar module_id:
:vartype module_id: str
:ivar step_type:
:vartype step_type: str
:ivar run_settings:
:vartype run_settings: list[~flow.models.RunSettingParameterAssignment]
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': '[RunSettingParameterAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword module_id:
:paramtype module_id: str
:keyword step_type:
:paramtype step_type: str
:keyword run_settings:
:paramtype run_settings: list[~flow.models.RunSettingParameterAssignment]
"""
super(GraphModuleNodeRunSetting, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.module_id = kwargs.get('module_id', None)
self.step_type = kwargs.get('step_type', None)
self.run_settings = kwargs.get('run_settings', None)
class GraphModuleNodeUIInputSetting(msrest.serialization.Model):
"""GraphModuleNodeUIInputSetting.
:ivar node_id:
:vartype node_id: str
:ivar module_id:
:vartype module_id: str
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.UIInputSetting]
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[UIInputSetting]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword module_id:
:paramtype module_id: str
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.UIInputSetting]
"""
super(GraphModuleNodeUIInputSetting, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.module_id = kwargs.get('module_id', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
class GraphNodeStatusInfo(msrest.serialization.Model):
"""GraphNodeStatusInfo.
:ivar status: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished",
"Canceled", "PartiallyExecuted", "Bypassed".
:vartype status: str or ~flow.models.TaskStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar is_bypassed:
:vartype is_bypassed: bool
:ivar has_failed_child_run:
:vartype has_failed_child_run: bool
:ivar partially_executed:
:vartype partially_executed: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar aether_creation_time:
:vartype aether_creation_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar run_history_creation_time:
:vartype run_history_creation_time: ~datetime.datetime
:ivar reuse_info:
:vartype reuse_info: ~flow.models.TaskReuseInfo
:ivar control_flow_info:
:vartype control_flow_info: ~flow.models.TaskControlFlowInfo
:ivar status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:vartype status_code: str or ~flow.models.TaskStatusCode
:ivar status_detail:
:vartype status_detail: str
:ivar creation_time:
:vartype creation_time: ~datetime.datetime
:ivar schedule_time:
:vartype schedule_time: ~datetime.datetime
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar request_id:
:vartype request_id: str
:ivar run_id:
:vartype run_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar real_time_log_path:
:vartype real_time_log_path: str
:ivar has_warnings:
:vartype has_warnings: bool
:ivar composite_node_id:
:vartype composite_node_id: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'is_bypassed': {'key': 'isBypassed', 'type': 'bool'},
'has_failed_child_run': {'key': 'hasFailedChildRun', 'type': 'bool'},
'partially_executed': {'key': 'partiallyExecuted', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'aether_creation_time': {'key': 'aetherCreationTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'run_history_creation_time': {'key': 'runHistoryCreationTime', 'type': 'iso-8601'},
'reuse_info': {'key': 'reuseInfo', 'type': 'TaskReuseInfo'},
'control_flow_info': {'key': 'controlFlowInfo', 'type': 'TaskControlFlowInfo'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'schedule_time': {'key': 'scheduleTime', 'type': 'iso-8601'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'request_id': {'key': 'requestId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'real_time_log_path': {'key': 'realTimeLogPath', 'type': 'str'},
'has_warnings': {'key': 'hasWarnings', 'type': 'bool'},
'composite_node_id': {'key': 'compositeNodeId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:paramtype status: str or ~flow.models.TaskStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword is_bypassed:
:paramtype is_bypassed: bool
:keyword has_failed_child_run:
:paramtype has_failed_child_run: bool
:keyword partially_executed:
:paramtype partially_executed: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword aether_creation_time:
:paramtype aether_creation_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword run_history_creation_time:
:paramtype run_history_creation_time: ~datetime.datetime
:keyword reuse_info:
:paramtype reuse_info: ~flow.models.TaskReuseInfo
:keyword control_flow_info:
:paramtype control_flow_info: ~flow.models.TaskControlFlowInfo
:keyword status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:paramtype status_code: str or ~flow.models.TaskStatusCode
:keyword status_detail:
:paramtype status_detail: str
:keyword creation_time:
:paramtype creation_time: ~datetime.datetime
:keyword schedule_time:
:paramtype schedule_time: ~datetime.datetime
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword request_id:
:paramtype request_id: str
:keyword run_id:
:paramtype run_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword real_time_log_path:
:paramtype real_time_log_path: str
:keyword has_warnings:
:paramtype has_warnings: bool
:keyword composite_node_id:
:paramtype composite_node_id: str
"""
super(GraphNodeStatusInfo, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.run_status = kwargs.get('run_status', None)
self.is_bypassed = kwargs.get('is_bypassed', None)
self.has_failed_child_run = kwargs.get('has_failed_child_run', None)
self.partially_executed = kwargs.get('partially_executed', None)
self.properties = kwargs.get('properties', None)
self.aether_start_time = kwargs.get('aether_start_time', None)
self.aether_end_time = kwargs.get('aether_end_time', None)
self.aether_creation_time = kwargs.get('aether_creation_time', None)
self.run_history_start_time = kwargs.get('run_history_start_time', None)
self.run_history_end_time = kwargs.get('run_history_end_time', None)
self.run_history_creation_time = kwargs.get('run_history_creation_time', None)
self.reuse_info = kwargs.get('reuse_info', None)
self.control_flow_info = kwargs.get('control_flow_info', None)
self.status_code = kwargs.get('status_code', None)
self.status_detail = kwargs.get('status_detail', None)
self.creation_time = kwargs.get('creation_time', None)
self.schedule_time = kwargs.get('schedule_time', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.request_id = kwargs.get('request_id', None)
self.run_id = kwargs.get('run_id', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.real_time_log_path = kwargs.get('real_time_log_path', None)
self.has_warnings = kwargs.get('has_warnings', None)
self.composite_node_id = kwargs.get('composite_node_id', None)
class GraphReferenceNode(msrest.serialization.Model):
"""GraphReferenceNode.
:ivar graph_id:
:vartype graph_id: str
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.ParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.OutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.InputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.ControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.ExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.ParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.OutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.InputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.ControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.ExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphReferenceNode, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.id = kwargs.get('id', None)
self.module_id = kwargs.get('module_id', None)
self.comment = kwargs.get('comment', None)
self.name = kwargs.get('name', None)
self.module_parameters = kwargs.get('module_parameters', None)
self.module_metadata_parameters = kwargs.get('module_metadata_parameters', None)
self.module_output_settings = kwargs.get('module_output_settings', None)
self.module_input_settings = kwargs.get('module_input_settings', None)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.use_graph_default_datastore = kwargs.get('use_graph_default_datastore', None)
self.regenerate_output = kwargs.get('regenerate_output', None)
self.control_inputs = kwargs.get('control_inputs', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.execution_phase = kwargs.get('execution_phase', None)
self.run_attribution = kwargs.get('run_attribution', None)
class HdfsReference(msrest.serialization.Model):
"""HdfsReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(HdfsReference, self).__init__(**kwargs)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
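
# Usage sketch (illustrative only): an HDFS reference addresses data by
# datastore name plus a relative path; both values below are hypothetical.
def _example_hdfs_reference():  # pragma: no cover - usage sketch
    return HdfsReference(
        aml_data_store_name="workspaceblobstore",
        relative_path="data/train",
    )
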
class HdiClusterComputeInfo(msrest.serialization.Model):
"""HdiClusterComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(HdiClusterComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)
class HdiConfiguration(msrest.serialization.Model):
"""HdiConfiguration.
:ivar yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
:vartype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
"""
_attribute_map = {
'yarn_deploy_mode': {'key': 'yarnDeployMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
:paramtype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
"""
super(HdiConfiguration, self).__init__(**kwargs)
self.yarn_deploy_mode = kwargs.get('yarn_deploy_mode', None)
class HdiRunConfiguration(msrest.serialization.Model):
"""HdiRunConfiguration.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar compute_name:
:vartype compute_name: str
:ivar queue:
:vartype queue: str
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar name:
:vartype name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'str'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'conf': {'key': 'conf', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword compute_name:
:paramtype compute_name: str
:keyword queue:
:paramtype queue: str
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword name:
:paramtype name: str
"""
super(HdiRunConfiguration, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.compute_name = kwargs.get('compute_name', None)
self.queue = kwargs.get('queue', None)
self.driver_memory = kwargs.get('driver_memory', None)
self.driver_cores = kwargs.get('driver_cores', None)
self.executor_memory = kwargs.get('executor_memory', None)
self.executor_cores = kwargs.get('executor_cores', None)
self.number_executors = kwargs.get('number_executors', None)
self.conf = kwargs.get('conf', None)
self.name = kwargs.get('name', None)
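
# Usage sketch (illustrative only): HdiRunConfiguration carries the usual
# spark-submit style knobs (entry file/class, queue, driver and executor
# sizing). All values below are hypothetical.
def _example_hdi_run_configuration():  # pragma: no cover - usage sketch
    return HdiRunConfiguration(
        file="wasbs://jobs@account.blob.core.windows.net/app.jar",
        class_name="com.contoso.SparkApp",
        queue="default",
        driver_memory="4g",
        driver_cores=2,
        executor_memory="8g",
        executor_cores=4,
        number_executors=10,
        conf={"spark.yarn.maxAppAttempts": "1"},
    )
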
class HistoryConfiguration(msrest.serialization.Model):
"""HistoryConfiguration.
    :ivar output_collection: Defaults to True.
    :vartype output_collection: bool
    :ivar directories_to_watch: Defaults to ['logs'].
    :vartype directories_to_watch: list[str]
    :ivar enable_m_lflow_tracking: Defaults to True.
    :vartype enable_m_lflow_tracking: bool
"""
_attribute_map = {
'output_collection': {'key': 'outputCollection', 'type': 'bool'},
'directories_to_watch': {'key': 'directoriesToWatch', 'type': '[str]'},
'enable_m_lflow_tracking': {'key': 'enableMLflowTracking', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
        :keyword output_collection: Defaults to True.
        :paramtype output_collection: bool
        :keyword directories_to_watch: Defaults to ['logs'].
        :paramtype directories_to_watch: list[str]
        :keyword enable_m_lflow_tracking: Defaults to True.
        :paramtype enable_m_lflow_tracking: bool
"""
super(HistoryConfiguration, self).__init__(**kwargs)
self.output_collection = kwargs.get('output_collection', True)
self.directories_to_watch = kwargs.get('directories_to_watch', ['logs'])
self.enable_m_lflow_tracking = kwargs.get('enable_m_lflow_tracking', True)
class HyperDriveConfiguration(msrest.serialization.Model):
"""HyperDriveConfiguration.
:ivar hyper_drive_run_config:
:vartype hyper_drive_run_config: str
:ivar primary_metric_goal:
:vartype primary_metric_goal: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar arguments:
:vartype arguments: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hyper_drive_run_config:
:paramtype hyper_drive_run_config: str
:keyword primary_metric_goal:
:paramtype primary_metric_goal: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword arguments:
:paramtype arguments: list[~flow.models.ArgumentAssignment]
"""
super(HyperDriveConfiguration, self).__init__(**kwargs)
self.hyper_drive_run_config = kwargs.get('hyper_drive_run_config', None)
self.primary_metric_goal = kwargs.get('primary_metric_goal', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.arguments = kwargs.get('arguments', None)
class ICheckableLongRunningOperationResponse(msrest.serialization.Model):
"""ICheckableLongRunningOperationResponse.
:ivar completion_result: Any object.
:vartype completion_result: any
:ivar location:
:vartype location: str
:ivar operation_result:
:vartype operation_result: str
"""
_attribute_map = {
'completion_result': {'key': 'completionResult', 'type': 'object'},
'location': {'key': 'location', 'type': 'str'},
'operation_result': {'key': 'operationResult', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion_result: Any object.
:paramtype completion_result: any
:keyword location:
:paramtype location: str
:keyword operation_result:
:paramtype operation_result: str
"""
super(ICheckableLongRunningOperationResponse, self).__init__(**kwargs)
self.completion_result = kwargs.get('completion_result', None)
self.location = kwargs.get('location', None)
self.operation_result = kwargs.get('operation_result', None)
class IdentityConfiguration(msrest.serialization.Model):
"""IdentityConfiguration.
:ivar type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
:vartype type: str or ~flow.models.IdentityType
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar secret:
:vartype secret: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'secret': {'key': 'secret', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
:paramtype type: str or ~flow.models.IdentityType
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword secret:
:paramtype secret: str
"""
super(IdentityConfiguration, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.properties = kwargs.get('properties', None)
self.secret = kwargs.get('secret', None)
class IdentitySetting(msrest.serialization.Model):
"""IdentitySetting.
:ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:vartype type: str or ~flow.models.AEVAIdentityType
:ivar client_id:
:vartype client_id: str
:ivar object_id:
:vartype object_id: str
:ivar msi_resource_id:
:vartype msi_resource_id: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'object_id': {'key': 'objectId', 'type': 'str'},
'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:paramtype type: str or ~flow.models.AEVAIdentityType
:keyword client_id:
:paramtype client_id: str
:keyword object_id:
:paramtype object_id: str
:keyword msi_resource_id:
:paramtype msi_resource_id: str
"""
super(IdentitySetting, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.client_id = kwargs.get('client_id', None)
self.object_id = kwargs.get('object_id', None)
self.msi_resource_id = kwargs.get('msi_resource_id', None)
class ImportDataTask(msrest.serialization.Model):
"""ImportDataTask.
:ivar data_transfer_source:
:vartype data_transfer_source: ~flow.models.DataTransferSource
"""
_attribute_map = {
'data_transfer_source': {'key': 'DataTransferSource', 'type': 'DataTransferSource'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_transfer_source:
:paramtype data_transfer_source: ~flow.models.DataTransferSource
"""
super(ImportDataTask, self).__init__(**kwargs)
self.data_transfer_source = kwargs.get('data_transfer_source', None)
class IndexedErrorResponse(msrest.serialization.Model):
"""IndexedErrorResponse.
:ivar code:
:vartype code: str
:ivar error_code_hierarchy:
:vartype error_code_hierarchy: str
:ivar message:
:vartype message: str
:ivar time:
:vartype time: ~datetime.datetime
:ivar component_name:
:vartype component_name: str
:ivar severity:
:vartype severity: int
:ivar details_uri:
:vartype details_uri: str
:ivar reference_code:
:vartype reference_code: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'error_code_hierarchy': {'key': 'errorCodeHierarchy', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'component_name': {'key': 'componentName', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'int'},
'details_uri': {'key': 'detailsUri', 'type': 'str'},
'reference_code': {'key': 'referenceCode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword error_code_hierarchy:
:paramtype error_code_hierarchy: str
:keyword message:
:paramtype message: str
:keyword time:
:paramtype time: ~datetime.datetime
:keyword component_name:
:paramtype component_name: str
:keyword severity:
:paramtype severity: int
:keyword details_uri:
:paramtype details_uri: str
:keyword reference_code:
:paramtype reference_code: str
"""
super(IndexedErrorResponse, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.error_code_hierarchy = kwargs.get('error_code_hierarchy', None)
self.message = kwargs.get('message', None)
self.time = kwargs.get('time', None)
self.component_name = kwargs.get('component_name', None)
self.severity = kwargs.get('severity', None)
self.details_uri = kwargs.get('details_uri', None)
self.reference_code = kwargs.get('reference_code', None)
class InitScriptInfoDto(msrest.serialization.Model):
"""InitScriptInfoDto.
:ivar dbfs:
:vartype dbfs: ~flow.models.DbfsStorageInfoDto
"""
_attribute_map = {
'dbfs': {'key': 'dbfs', 'type': 'DbfsStorageInfoDto'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dbfs:
:paramtype dbfs: ~flow.models.DbfsStorageInfoDto
"""
super(InitScriptInfoDto, self).__init__(**kwargs)
self.dbfs = kwargs.get('dbfs', None)
class InnerErrorDetails(msrest.serialization.Model):
"""InnerErrorDetails.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
"""
super(InnerErrorDetails, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
class InnerErrorResponse(msrest.serialization.Model):
"""A nested structure of errors.
:ivar code: The error code.
:vartype code: str
:ivar inner_error: A nested structure of errors.
:vartype inner_error: ~flow.models.InnerErrorResponse
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: The error code.
:paramtype code: str
:keyword inner_error: A nested structure of errors.
:paramtype inner_error: ~flow.models.InnerErrorResponse
"""
super(InnerErrorResponse, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.inner_error = kwargs.get('inner_error', None)
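
# Hand-written helper sketch (not generated). InnerErrorResponse is a linked
# list of error codes; this shows how a caller might flatten the chain into a
# single "outer/inner/..." hierarchy string. The separator is an assumption.
def _example_error_code_hierarchy(error):  # pragma: no cover - illustrative only
    codes = []
    while error is not None:
        if error.code:
            codes.append(error.code)
        error = error.inner_error  # None terminates the chain
    return '/'.join(codes)  # e.g. 'UserError/Validation'
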
class InputAsset(msrest.serialization.Model):
"""InputAsset.
:ivar asset:
:vartype asset: ~flow.models.Asset
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DeliveryMechanism
:ivar environment_variable_name:
:vartype environment_variable_name: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar options: Dictionary of :code:`<string>`.
:vartype options: dict[str, str]
"""
_attribute_map = {
'asset': {'key': 'asset', 'type': 'Asset'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'options': {'key': 'options', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset:
:paramtype asset: ~flow.models.Asset
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DeliveryMechanism
:keyword environment_variable_name:
:paramtype environment_variable_name: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword options: Dictionary of :code:`<string>`.
:paramtype options: dict[str, str]
"""
super(InputAsset, self).__init__(**kwargs)
self.asset = kwargs.get('asset', None)
self.mechanism = kwargs.get('mechanism', None)
self.environment_variable_name = kwargs.get('environment_variable_name', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.options = kwargs.get('options', None)
class InputData(msrest.serialization.Model):
"""InputData.
:ivar dataset_id:
:vartype dataset_id: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar value:
:vartype value: str
"""
_attribute_map = {
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_id:
:paramtype dataset_id: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword value:
:paramtype value: str
"""
super(InputData, self).__init__(**kwargs)
self.dataset_id = kwargs.get('dataset_id', None)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
class InputDataBinding(msrest.serialization.Model):
"""InputDataBinding.
:ivar data_id:
:vartype data_id: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar description:
:vartype description: str
:ivar uri:
:vartype uri: ~flow.models.MfeInternalUriReference
:ivar value:
:vartype value: str
:ivar asset_uri:
:vartype asset_uri: str
:ivar job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_input_type: str or ~flow.models.JobInputType
"""
_attribute_map = {
'data_id': {'key': 'dataId', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
'value': {'key': 'value', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'job_input_type': {'key': 'jobInputType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_id:
:paramtype data_id: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword description:
:paramtype description: str
:keyword uri:
:paramtype uri: ~flow.models.MfeInternalUriReference
:keyword value:
:paramtype value: str
:keyword asset_uri:
:paramtype asset_uri: str
:keyword job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType
"""
super(InputDataBinding, self).__init__(**kwargs)
self.data_id = kwargs.get('data_id', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.mode = kwargs.get('mode', None)
self.description = kwargs.get('description', None)
self.uri = kwargs.get('uri', None)
self.value = kwargs.get('value', None)
self.asset_uri = kwargs.get('asset_uri', None)
self.job_input_type = kwargs.get('job_input_type', None)
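
# Hand-written usage sketch (not generated). It builds a minimal mount-mode
# binding; 'Mount' and 'UriFolder' come from the enums documented above, while
# the data_id value is a made-up placeholder.
def _example_input_data_binding():  # pragma: no cover - illustrative only
    binding = InputDataBinding(
        data_id='azureml:my_dataset:1',  # placeholder asset reference
        mode='Mount',
        job_input_type='UriFolder',
    )
    # serialize() emits the wire names, e.g. data_id -> 'dataId'.
    return binding.serialize()
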
class InputDefinition(msrest.serialization.Model):
"""InputDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar default: Anything.
:vartype default: any
:ivar description:
:vartype description: str
:ivar enum:
:vartype enum: list[str]
:ivar enabled_by:
:vartype enabled_by: str
:ivar enabled_by_type:
:vartype enabled_by_type: list[str or ~flow.models.ValueType]
:ivar enabled_by_value:
:vartype enabled_by_value: list[any]
:ivar model_list:
:vartype model_list: list[str]
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar dynamic_list:
:vartype dynamic_list: ~flow.models.ToolInputDynamicList
:ivar allow_manual_entry:
:vartype allow_manual_entry: bool
:ivar is_multi_select:
:vartype is_multi_select: bool
:ivar generated_by:
:vartype generated_by: ~flow.models.ToolInputGeneratedBy
:ivar input_type: Possible values include: "default", "uionly_hidden".
:vartype input_type: str or ~flow.models.InputType
:ivar advanced:
:vartype advanced: bool
:ivar ui_hints: This is a dictionary.
:vartype ui_hints: dict[str, any]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': '[str]'},
'default': {'key': 'default', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'enum': {'key': 'enum', 'type': '[str]'},
'enabled_by': {'key': 'enabled_by', 'type': 'str'},
'enabled_by_type': {'key': 'enabled_by_type', 'type': '[str]'},
'enabled_by_value': {'key': 'enabled_by_value', 'type': '[object]'},
'model_list': {'key': 'model_list', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'dynamic_list': {'key': 'dynamic_list', 'type': 'ToolInputDynamicList'},
'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
'is_multi_select': {'key': 'is_multi_select', 'type': 'bool'},
'generated_by': {'key': 'generated_by', 'type': 'ToolInputGeneratedBy'},
'input_type': {'key': 'input_type', 'type': 'str'},
'advanced': {'key': 'advanced', 'type': 'bool'},
'ui_hints': {'key': 'ui_hints', 'type': '{object}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword default: Anything.
:paramtype default: any
:keyword description:
:paramtype description: str
:keyword enum:
:paramtype enum: list[str]
:keyword enabled_by:
:paramtype enabled_by: str
:keyword enabled_by_type:
:paramtype enabled_by_type: list[str or ~flow.models.ValueType]
:keyword enabled_by_value:
:paramtype enabled_by_value: list[any]
:keyword model_list:
:paramtype model_list: list[str]
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword dynamic_list:
:paramtype dynamic_list: ~flow.models.ToolInputDynamicList
:keyword allow_manual_entry:
:paramtype allow_manual_entry: bool
:keyword is_multi_select:
:paramtype is_multi_select: bool
:keyword generated_by:
:paramtype generated_by: ~flow.models.ToolInputGeneratedBy
:keyword input_type: Possible values include: "default", "uionly_hidden".
:paramtype input_type: str or ~flow.models.InputType
:keyword advanced:
:paramtype advanced: bool
:keyword ui_hints: This is a dictionary.
:paramtype ui_hints: dict[str, any]
"""
super(InputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.default = kwargs.get('default', None)
self.description = kwargs.get('description', None)
self.enum = kwargs.get('enum', None)
self.enabled_by = kwargs.get('enabled_by', None)
self.enabled_by_type = kwargs.get('enabled_by_type', None)
self.enabled_by_value = kwargs.get('enabled_by_value', None)
self.model_list = kwargs.get('model_list', None)
self.capabilities = kwargs.get('capabilities', None)
self.dynamic_list = kwargs.get('dynamic_list', None)
self.allow_manual_entry = kwargs.get('allow_manual_entry', None)
self.is_multi_select = kwargs.get('is_multi_select', None)
self.generated_by = kwargs.get('generated_by', None)
self.input_type = kwargs.get('input_type', None)
self.advanced = kwargs.get('advanced', None)
self.ui_hints = kwargs.get('ui_hints', None)
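
# Hand-written usage sketch (not generated). It assembles a simple enum-style
# tool input; 'string' is assumed to be a valid ValueType value, and the other
# literals are placeholders.
def _example_input_definition():  # pragma: no cover - illustrative only
    return InputDefinition(
        name='deployment_name',
        type=['string'],  # ValueType values travel as plain strings
        description='Which model deployment the tool should call.',
        enum=['gpt-35-turbo', 'gpt-4'],
        allow_manual_entry=False,
        input_type='default',
    )
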
class InputOutputPortMetadata(msrest.serialization.Model):
"""InputOutputPortMetadata.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar graph_module_node_id:
:vartype graph_module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar schema:
:vartype schema: str
:ivar name:
:vartype name: str
:ivar id:
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'graph_module_node_id': {'key': 'graphModuleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'schema': {'key': 'schema', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_module_node_id:
:paramtype graph_module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword schema:
:paramtype schema: str
:keyword name:
:paramtype name: str
"""
super(InputOutputPortMetadata, self).__init__(**kwargs)
self.graph_module_node_id = kwargs.get('graph_module_node_id', None)
self.port_name = kwargs.get('port_name', None)
self.schema = kwargs.get('schema', None)
self.name = kwargs.get('name', None)
self.id = None
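
# Hand-written usage sketch (not generated). 'id' is marked readonly in
# _validation: the constructor ignores any value passed for it, and
# serialize() drops readonly attributes unless keep_readonly=True.
def _example_port_metadata():  # pragma: no cover - illustrative only
    port = InputOutputPortMetadata(graph_module_node_id='node-1', port_name='output')
    assert port.id is None  # populated by the server, never by the client
    return port.serialize()
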
class InputSetting(msrest.serialization.Model):
"""InputSetting.
:ivar name:
:vartype name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(InputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.options = kwargs.get('options', None)
self.additional_transformations = kwargs.get('additional_transformations', None)
class IntellectualPropertyPublisherInformation(msrest.serialization.Model):
"""IntellectualPropertyPublisherInformation.
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(IntellectualPropertyPublisherInformation, self).__init__(**kwargs)
self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None)
class InteractiveConfig(msrest.serialization.Model):
"""InteractiveConfig.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(InteractiveConfig, self).__init__(**kwargs)
self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
self.interactive_port = kwargs.get('interactive_port', None)
class InteractiveConfiguration(msrest.serialization.Model):
"""InteractiveConfiguration.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(InteractiveConfiguration, self).__init__(**kwargs)
self.is_ssh_enabled = kwargs.get('is_ssh_enabled', None)
self.ssh_public_key = kwargs.get('ssh_public_key', None)
self.is_i_python_enabled = kwargs.get('is_i_python_enabled', None)
self.is_tensor_board_enabled = kwargs.get('is_tensor_board_enabled', None)
self.interactive_port = kwargs.get('interactive_port', None)
class JobCost(msrest.serialization.Model):
"""JobCost.
:ivar charged_cpu_core_seconds:
:vartype charged_cpu_core_seconds: float
:ivar charged_cpu_memory_megabyte_seconds:
:vartype charged_cpu_memory_megabyte_seconds: float
:ivar charged_gpu_seconds:
:vartype charged_gpu_seconds: float
:ivar charged_node_utilization_seconds:
:vartype charged_node_utilization_seconds: float
"""
_attribute_map = {
'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'},
'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'},
'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'},
'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword charged_cpu_core_seconds:
:paramtype charged_cpu_core_seconds: float
:keyword charged_cpu_memory_megabyte_seconds:
:paramtype charged_cpu_memory_megabyte_seconds: float
:keyword charged_gpu_seconds:
:paramtype charged_gpu_seconds: float
:keyword charged_node_utilization_seconds:
:paramtype charged_node_utilization_seconds: float
"""
super(JobCost, self).__init__(**kwargs)
self.charged_cpu_core_seconds = kwargs.get('charged_cpu_core_seconds', None)
self.charged_cpu_memory_megabyte_seconds = kwargs.get('charged_cpu_memory_megabyte_seconds', None)
self.charged_gpu_seconds = kwargs.get('charged_gpu_seconds', None)
self.charged_node_utilization_seconds = kwargs.get('charged_node_utilization_seconds', None)
class JobEndpoint(msrest.serialization.Model):
"""JobEndpoint.
:ivar type:
:vartype type: str
:ivar port:
:vartype port: int
:ivar endpoint:
:vartype endpoint: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: ~flow.models.MfeInternalNodes
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'MfeInternalNodes'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword port:
:paramtype port: int
:keyword endpoint:
:paramtype endpoint: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: ~flow.models.MfeInternalNodes
"""
super(JobEndpoint, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.port = kwargs.get('port', None)
self.endpoint = kwargs.get('endpoint', None)
self.status = kwargs.get('status', None)
self.error_message = kwargs.get('error_message', None)
self.properties = kwargs.get('properties', None)
self.nodes = kwargs.get('nodes', None)
class JobInput(msrest.serialization.Model):
"""JobInput.
All required parameters must be populated in order to send to Azure.
:ivar job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
"UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_input_type: str or ~flow.models.JobInputType
:ivar description:
:vartype description: str
"""
_validation = {
'job_input_type': {'required': True},
}
_attribute_map = {
'job_input_type': {'key': 'jobInputType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
"UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType
:keyword description:
:paramtype description: str
"""
super(JobInput, self).__init__(**kwargs)
self.job_input_type = kwargs['job_input_type']
self.description = kwargs.get('description', None)
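
# Hand-written usage sketch (not generated). job_input_type is the one
# required field, so the constructor indexes kwargs directly and a missing
# value surfaces as a KeyError rather than defaulting to None.
def _example_job_input():  # pragma: no cover - illustrative only
    try:
        JobInput()
    except KeyError:
        pass  # job_input_type was not supplied
    return JobInput(job_input_type='UriFile', description='training data')
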
class JobOutput(msrest.serialization.Model):
"""JobOutput.
All required parameters must be populated in order to send to Azure.
:ivar job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_output_type: str or ~flow.models.JobOutputType
:ivar description:
:vartype description: str
:ivar auto_delete_setting:
:vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
_validation = {
'job_output_type': {'required': True},
}
_attribute_map = {
'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_output_type: str or ~flow.models.JobOutputType
:keyword description:
:paramtype description: str
:keyword auto_delete_setting:
:paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
super(JobOutput, self).__init__(**kwargs)
self.job_output_type = kwargs['job_output_type']
self.description = kwargs.get('description', None)
self.auto_delete_setting = kwargs.get('auto_delete_setting', None)
class JobOutputArtifacts(msrest.serialization.Model):
"""JobOutputArtifacts.
:ivar datastore_id:
:vartype datastore_id: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'datastore_id': {'key': 'datastoreId', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datastore_id:
:paramtype datastore_id: str
:keyword path:
:paramtype path: str
"""
super(JobOutputArtifacts, self).__init__(**kwargs)
self.datastore_id = kwargs.get('datastore_id', None)
self.path = kwargs.get('path', None)
class JobScheduleDto(msrest.serialization.Model):
"""JobScheduleDto.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar name:
:vartype name: str
:ivar job_definition_id:
:vartype job_definition_id: str
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'name': {'key': 'name', 'type': 'str'},
'job_definition_id': {'key': 'jobDefinitionId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword name:
:paramtype name: str
:keyword job_definition_id:
:paramtype job_definition_id: str
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(JobScheduleDto, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.system_data = kwargs.get('system_data', None)
self.name = kwargs.get('name', None)
self.job_definition_id = kwargs.get('job_definition_id', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
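
# Hand-written usage sketch (not generated). A schedule pairs trigger_type
# with the matching trigger model (cron here, recurrence otherwise). The Cron
# field name 'expression' is an assumption about that model, made purely for
# illustration.
def _example_cron_schedule():  # pragma: no cover - illustrative only
    return JobScheduleDto(
        job_type='Pipeline',
        name='nightly-training',
        trigger_type='Cron',
        cron=Cron(expression='0 2 * * *'),  # assumed field name; 2 AM daily
        status='Enabled',
    )
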
class K8SConfiguration(msrest.serialization.Model):
"""K8SConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.ResourceConfig
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.PriorityConfig
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.InteractiveConfig
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfig'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'PriorityConfig'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfig'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.ResourceConfig
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.PriorityConfig
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.InteractiveConfig
"""
super(K8SConfiguration, self).__init__(**kwargs)
self.max_retry_count = kwargs.get('max_retry_count', None)
self.resource_configuration = kwargs.get('resource_configuration', None)
self.priority_configuration = kwargs.get('priority_configuration', None)
self.interactive_configuration = kwargs.get('interactive_configuration', None)
class KeyValuePairComponentNameMetaInfoErrorResponse(msrest.serialization.Model):
"""KeyValuePairComponentNameMetaInfoErrorResponse.
:ivar key:
:vartype key: ~flow.models.ComponentNameMetaInfo
:ivar value: The error response.
:vartype value: ~flow.models.ErrorResponse
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
'value': {'key': 'value', 'type': 'ErrorResponse'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: ~flow.models.ComponentNameMetaInfo
:keyword value: The error response.
:paramtype value: ~flow.models.ErrorResponse
"""
super(KeyValuePairComponentNameMetaInfoErrorResponse, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class KeyValuePairComponentNameMetaInfoModuleDto(msrest.serialization.Model):
"""KeyValuePairComponentNameMetaInfoModuleDto.
:ivar key:
:vartype key: ~flow.models.ComponentNameMetaInfo
:ivar value:
:vartype value: ~flow.models.ModuleDto
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
'value': {'key': 'value', 'type': 'ModuleDto'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: ~flow.models.ComponentNameMetaInfo
:keyword value:
:paramtype value: ~flow.models.ModuleDto
"""
super(KeyValuePairComponentNameMetaInfoModuleDto, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class KeyValuePairStringObject(msrest.serialization.Model):
"""KeyValuePairStringObject.
:ivar key:
:vartype key: str
:ivar value: Anything.
:vartype value: any
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: str
:keyword value: Anything.
:paramtype value: any
"""
super(KeyValuePairStringObject, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class KubernetesConfiguration(msrest.serialization.Model):
"""KubernetesConfiguration.
:ivar instance_type:
:vartype instance_type: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
"""
super(KubernetesConfiguration, self).__init__(**kwargs)
self.instance_type = kwargs.get('instance_type', None)
class Kwarg(msrest.serialization.Model):
"""Kwarg.
:ivar key:
:vartype key: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key:
:paramtype key: str
:keyword value:
:paramtype value: str
"""
super(Kwarg, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class LegacyDataPath(msrest.serialization.Model):
"""LegacyDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword relative_path:
:paramtype relative_path: str
"""
super(LegacyDataPath, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.relative_path = kwargs.get('relative_path', None)
class LimitSettings(msrest.serialization.Model):
"""LimitSettings.
:ivar max_trials:
:vartype max_trials: int
:ivar timeout:
:vartype timeout: str
:ivar trial_timeout:
:vartype trial_timeout: str
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
:ivar max_cores_per_trial:
:vartype max_cores_per_trial: int
:ivar exit_score:
:vartype exit_score: float
:ivar enable_early_termination:
:vartype enable_early_termination: bool
:ivar max_nodes:
:vartype max_nodes: int
"""
_attribute_map = {
'max_trials': {'key': 'maxTrials', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'str'},
'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
'exit_score': {'key': 'exitScore', 'type': 'float'},
'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
'max_nodes': {'key': 'maxNodes', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_trials:
:paramtype max_trials: int
:keyword timeout:
:paramtype timeout: str
:keyword trial_timeout:
:paramtype trial_timeout: str
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
:keyword max_cores_per_trial:
:paramtype max_cores_per_trial: int
:keyword exit_score:
:paramtype exit_score: float
:keyword enable_early_termination:
:paramtype enable_early_termination: bool
:keyword max_nodes:
:paramtype max_nodes: int
"""
super(LimitSettings, self).__init__(**kwargs)
self.max_trials = kwargs.get('max_trials', None)
self.timeout = kwargs.get('timeout', None)
self.trial_timeout = kwargs.get('trial_timeout', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
self.max_cores_per_trial = kwargs.get('max_cores_per_trial', None)
self.exit_score = kwargs.get('exit_score', None)
self.enable_early_termination = kwargs.get('enable_early_termination', None)
self.max_nodes = kwargs.get('max_nodes', None)
class LinkedADBWorkspaceMetadata(msrest.serialization.Model):
"""LinkedADBWorkspaceMetadata.
:ivar workspace_id:
:vartype workspace_id: str
:ivar region:
:vartype region: str
"""
_attribute_map = {
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workspace_id:
:paramtype workspace_id: str
:keyword region:
:paramtype region: str
"""
super(LinkedADBWorkspaceMetadata, self).__init__(**kwargs)
self.workspace_id = kwargs.get('workspace_id', None)
self.region = kwargs.get('region', None)
class LinkedPipelineInfo(msrest.serialization.Model):
"""LinkedPipelineInfo.
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar module_node_id:
:vartype module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar is_direct_link:
:vartype is_direct_link: bool
"""
_attribute_map = {
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'is_direct_link': {'key': 'isDirectLink', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword module_node_id:
:paramtype module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword is_direct_link:
:paramtype is_direct_link: bool
"""
super(LinkedPipelineInfo, self).__init__(**kwargs)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.module_node_id = kwargs.get('module_node_id', None)
self.port_name = kwargs.get('port_name', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
self.is_direct_link = kwargs.get('is_direct_link', None)
class LoadFlowAsComponentRequest(msrest.serialization.Model):
"""LoadFlowAsComponentRequest.
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_definition_resource_id:
:vartype flow_definition_resource_id: str
:ivar flow_definition_data_store_name:
:vartype flow_definition_data_store_name: str
:ivar flow_definition_blob_path:
:vartype flow_definition_blob_path: str
:ivar flow_definition_data_uri:
:vartype flow_definition_data_uri: str
:ivar node_variant:
:vartype node_variant: str
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar session_id:
:vartype session_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
"""
_attribute_map = {
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
'node_variant': {'key': 'nodeVariant', 'type': 'str'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_definition_resource_id:
:paramtype flow_definition_resource_id: str
:keyword flow_definition_data_store_name:
:paramtype flow_definition_data_store_name: str
:keyword flow_definition_blob_path:
:paramtype flow_definition_blob_path: str
:keyword flow_definition_data_uri:
:paramtype flow_definition_data_uri: str
:keyword node_variant:
:paramtype node_variant: str
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword session_id:
:paramtype session_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
"""
super(LoadFlowAsComponentRequest, self).__init__(**kwargs)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_definition_resource_id = kwargs.get('flow_definition_resource_id', None)
self.flow_definition_data_store_name = kwargs.get('flow_definition_data_store_name', None)
self.flow_definition_blob_path = kwargs.get('flow_definition_blob_path', None)
self.flow_definition_data_uri = kwargs.get('flow_definition_data_uri', None)
self.node_variant = kwargs.get('node_variant', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.session_id = kwargs.get('session_id', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
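
# Hand-written usage sketch (not generated). The flow_definition_* fields are
# presumably alternative ways to locate the flow definition, so only one is
# populated here; every value below is a made-up placeholder.
def _example_load_flow_as_component():  # pragma: no cover - illustrative only
    return LoadFlowAsComponentRequest(
        component_name='my_flow_component',
        component_version='1',
        flow_definition_data_uri='azureml://datastores/workspaceblobstore/paths/flows/my_flow',
        runtime_name='my-runtime',
        connections={'llm_node': {'connection': 'open_ai_connection'}},
    )
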
class LogRunTerminatedEventDto(msrest.serialization.Model):
"""LogRunTerminatedEventDto.
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:vartype action_type: str or ~flow.models.ActionType
:ivar last_checked_time:
:vartype last_checked_time: ~datetime.datetime
"""
_attribute_map = {
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
'last_checked_time': {'key': 'lastCheckedTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:paramtype action_type: str or ~flow.models.ActionType
:keyword last_checked_time:
:paramtype last_checked_time: ~datetime.datetime
"""
super(LogRunTerminatedEventDto, self).__init__(**kwargs)
self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
self.action_type = kwargs.get('action_type', None)
self.last_checked_time = kwargs.get('last_checked_time', None)
class LongRunningOperationUriResponse(msrest.serialization.Model):
"""LongRunningOperationUriResponse.
:ivar location:
:vartype location: str
:ivar operation_result:
:vartype operation_result: str
"""
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'operation_result': {'key': 'operationResult', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword location:
:paramtype location: str
:keyword operation_result:
:paramtype operation_result: str
"""
super(LongRunningOperationUriResponse, self).__init__(**kwargs)
self.location = kwargs.get('location', None)
self.operation_result = kwargs.get('operation_result', None)
class LongRunningUpdateRegistryComponentRequest(msrest.serialization.Model):
"""LongRunningUpdateRegistryComponentRequest.
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar registry_name:
:vartype registry_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar update_type: Possible values include: "EnableModule", "DisableModule",
"UpdateDisplayName", "UpdateDescription", "UpdateTags".
:vartype update_type: str or ~flow.models.LongRunningUpdateType
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'update_type': {'key': 'updateType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword registry_name:
:paramtype registry_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword update_type: Possible values include: "EnableModule", "DisableModule",
"UpdateDisplayName", "UpdateDescription", "UpdateTags".
:paramtype update_type: str or ~flow.models.LongRunningUpdateType
"""
super(LongRunningUpdateRegistryComponentRequest, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.registry_name = kwargs.get('registry_name', None)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.update_type = kwargs.get('update_type', None)
class ManagedServiceIdentity(msrest.serialization.Model):
"""ManagedServiceIdentity.
All required parameters must be populated in order to send to Azure.
:ivar type: Required. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssignedUserAssigned", "None".
:vartype type: str or ~flow.models.ManagedServiceIdentityType
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
:vartype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Required. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssignedUserAssigned", "None".
:paramtype type: str or ~flow.models.ManagedServiceIdentityType
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
:paramtype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
"""
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.type = kwargs['type']
self.principal_id = kwargs.get('principal_id', None)
self.tenant_id = kwargs.get('tenant_id', None)
self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
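
# Hand-written usage sketch (not generated). msrest models can be rebuilt
# from a wire-shaped dict via deserialize(); 'type' is required, matching the
# _validation table above. The GUID is a placeholder.
def _example_identity_round_trip():  # pragma: no cover - illustrative only
    payload = {
        'type': 'SystemAssigned',
        'principalId': '00000000-0000-0000-0000-000000000000',
    }
    identity = ManagedServiceIdentity.deserialize(payload)
    assert identity.type == 'SystemAssigned'
    return identity
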
class MavenLibraryDto(msrest.serialization.Model):
"""MavenLibraryDto.
:ivar coordinates:
:vartype coordinates: str
:ivar repo:
:vartype repo: str
:ivar exclusions:
:vartype exclusions: list[str]
"""
_attribute_map = {
'coordinates': {'key': 'coordinates', 'type': 'str'},
'repo': {'key': 'repo', 'type': 'str'},
'exclusions': {'key': 'exclusions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword coordinates:
:paramtype coordinates: str
:keyword repo:
:paramtype repo: str
:keyword exclusions:
:paramtype exclusions: list[str]
"""
super(MavenLibraryDto, self).__init__(**kwargs)
self.coordinates = kwargs.get('coordinates', None)
self.repo = kwargs.get('repo', None)
self.exclusions = kwargs.get('exclusions', None)
class MetricProperties(msrest.serialization.Model):
"""MetricProperties.
:ivar ux_metric_type:
:vartype ux_metric_type: str
"""
_attribute_map = {
'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ux_metric_type:
:paramtype ux_metric_type: str
"""
super(MetricProperties, self).__init__(**kwargs)
self.ux_metric_type = kwargs.get('ux_metric_type', None)
class MetricSchemaDto(msrest.serialization.Model):
"""MetricSchemaDto.
:ivar num_properties:
:vartype num_properties: int
:ivar properties:
:vartype properties: list[~flow.models.MetricSchemaPropertyDto]
"""
_attribute_map = {
'num_properties': {'key': 'numProperties', 'type': 'int'},
'properties': {'key': 'properties', 'type': '[MetricSchemaPropertyDto]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword num_properties:
:paramtype num_properties: int
:keyword properties:
:paramtype properties: list[~flow.models.MetricSchemaPropertyDto]
"""
super(MetricSchemaDto, self).__init__(**kwargs)
self.num_properties = kwargs.get('num_properties', None)
self.properties = kwargs.get('properties', None)
class MetricSchemaPropertyDto(msrest.serialization.Model):
"""MetricSchemaPropertyDto.
:ivar property_id:
:vartype property_id: str
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'property_id': {'key': 'propertyId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword property_id:
:paramtype property_id: str
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
"""
super(MetricSchemaPropertyDto, self).__init__(**kwargs)
self.property_id = kwargs.get('property_id', None)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
class MetricV2Dto(msrest.serialization.Model):
"""MetricV2Dto.
:ivar data_container_id:
:vartype data_container_id: str
:ivar name:
:vartype name: str
:ivar columns: This is a dictionary.
:vartype columns: dict[str, str or ~flow.models.MetricValueType]
:ivar properties:
:vartype properties: ~flow.models.MetricProperties
:ivar namespace:
:vartype namespace: str
:ivar standard_schema_id:
:vartype standard_schema_id: str
:ivar value:
:vartype value: list[~flow.models.MetricV2Value]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'columns': {'key': 'columns', 'type': '{str}'},
'properties': {'key': 'properties', 'type': 'MetricProperties'},
'namespace': {'key': 'namespace', 'type': 'str'},
'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
'value': {'key': 'value', 'type': '[MetricV2Value]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_container_id:
:paramtype data_container_id: str
:keyword name:
:paramtype name: str
:keyword columns: This is a dictionary.
:paramtype columns: dict[str, str or ~flow.models.MetricValueType]
:keyword properties:
:paramtype properties: ~flow.models.MetricProperties
:keyword namespace:
:paramtype namespace: str
:keyword standard_schema_id:
:paramtype standard_schema_id: str
:keyword value:
:paramtype value: list[~flow.models.MetricV2Value]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(MetricV2Dto, self).__init__(**kwargs)
self.data_container_id = kwargs.get('data_container_id', None)
self.name = kwargs.get('name', None)
self.columns = kwargs.get('columns', None)
self.properties = kwargs.get('properties', None)
self.namespace = kwargs.get('namespace', None)
self.standard_schema_id = kwargs.get('standard_schema_id', None)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class MetricV2Value(msrest.serialization.Model):
"""MetricV2Value.
:ivar metric_id:
:vartype metric_id: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar step:
:vartype step: long
:ivar data: Dictionary of :code:`<any>`.
:vartype data: dict[str, any]
:ivar sas_uri:
:vartype sas_uri: str
"""
_attribute_map = {
'metric_id': {'key': 'metricId', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'step': {'key': 'step', 'type': 'long'},
'data': {'key': 'data', 'type': '{object}'},
'sas_uri': {'key': 'sasUri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword metric_id:
:paramtype metric_id: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword step:
:paramtype step: long
:keyword data: Dictionary of :code:`<any>`.
:paramtype data: dict[str, any]
:keyword sas_uri:
:paramtype sas_uri: str
"""
super(MetricV2Value, self).__init__(**kwargs)
self.metric_id = kwargs.get('metric_id', None)
self.created_utc = kwargs.get('created_utc', None)
self.step = kwargs.get('step', None)
self.data = kwargs.get('data', None)
self.sas_uri = kwargs.get('sas_uri', None)
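# Illustrative sketch: one page of metric values as the service might return them.
# ``continuation_token``/``next_link`` are None on the last page; the ids and
# values below are hypothetical.
#
#   import datetime
#   page = MetricV2Dto(
#       data_container_id="dcid-123",
#       name="accuracy",
#       value=[MetricV2Value(step=0, created_utc=datetime.datetime.utcnow(),
#                            data={"accuracy": 0.93})],
#       continuation_token=None,
#   )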
class MfeInternalAutologgerSettings(msrest.serialization.Model):
"""MfeInternalAutologgerSettings.
:ivar mlflow_autologger: Possible values include: "Enabled", "Disabled".
:vartype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
"""
_attribute_map = {
'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mlflow_autologger: Possible values include: "Enabled", "Disabled".
:paramtype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
"""
super(MfeInternalAutologgerSettings, self).__init__(**kwargs)
self.mlflow_autologger = kwargs.get('mlflow_autologger', None)
class MfeInternalIdentityConfiguration(msrest.serialization.Model):
"""MfeInternalIdentityConfiguration.
:ivar identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
:vartype identity_type: str or ~flow.models.MfeInternalIdentityType
"""
_attribute_map = {
'identity_type': {'key': 'identityType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
:paramtype identity_type: str or ~flow.models.MfeInternalIdentityType
"""
super(MfeInternalIdentityConfiguration, self).__init__(**kwargs)
self.identity_type = kwargs.get('identity_type', None)
class MfeInternalNodes(msrest.serialization.Model):
"""MfeInternalNodes.
:ivar nodes_value_type: The only acceptable values to pass in are None and "All". The default
value is None.
:vartype nodes_value_type: str
"""
_attribute_map = {
'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword nodes_value_type: The only acceptable values to pass in are None and "All". The
default value is None.
:paramtype nodes_value_type: str
"""
super(MfeInternalNodes, self).__init__(**kwargs)
self.nodes_value_type = kwargs.get('nodes_value_type', None)
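# Illustrative sketch: per the docstring above, only None and "All" are accepted.
#
#   all_nodes = MfeInternalNodes(nodes_value_type="All")
#   default_nodes = MfeInternalNodes()  # nodes_value_type stays None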
class MfeInternalOutputData(msrest.serialization.Model):
"""MfeInternalOutputData.
:ivar dataset_name:
:vartype dataset_name: str
:ivar datastore:
:vartype datastore: str
:ivar datapath:
:vartype datapath: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
"""
_attribute_map = {
'dataset_name': {'key': 'datasetName', 'type': 'str'},
'datastore': {'key': 'datastore', 'type': 'str'},
'datapath': {'key': 'datapath', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword dataset_name:
:paramtype dataset_name: str
:keyword datastore:
:paramtype datastore: str
:keyword datapath:
:paramtype datapath: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
"""
super(MfeInternalOutputData, self).__init__(**kwargs)
self.dataset_name = kwargs.get('dataset_name', None)
self.datastore = kwargs.get('datastore', None)
self.datapath = kwargs.get('datapath', None)
self.mode = kwargs.get('mode', None)
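# Illustrative sketch (hypothetical datastore and path): binding an output to a
# datastore location with one of the DataBindingMode values listed above.
#
#   output = MfeInternalOutputData(
#       datastore="workspaceblobstore",
#       datapath="outputs/model",
#       mode="Upload",
#   )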
class MfeInternalSecretConfiguration(msrest.serialization.Model):
"""MfeInternalSecretConfiguration.
:ivar workspace_secret_name:
:vartype workspace_secret_name: str
:ivar uri:
:vartype uri: str
"""
_attribute_map = {
'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workspace_secret_name:
:paramtype workspace_secret_name: str
:keyword uri:
:paramtype uri: str
"""
super(MfeInternalSecretConfiguration, self).__init__(**kwargs)
self.workspace_secret_name = kwargs.get('workspace_secret_name', None)
self.uri = kwargs.get('uri', None)
class MfeInternalUriReference(msrest.serialization.Model):
"""MfeInternalUriReference.
:ivar file:
:vartype file: str
:ivar folder:
:vartype folder: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'folder': {'key': 'folder', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword folder:
:paramtype folder: str
"""
super(MfeInternalUriReference, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.folder = kwargs.get('folder', None)
class MfeInternalV20211001ComponentJob(msrest.serialization.Model):
"""MfeInternalV20211001ComponentJob.
:ivar compute_id:
:vartype compute_id: str
:ivar component_id:
:vartype component_id: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.JobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.JobOutput]
:ivar overrides: Anything.
:vartype overrides: any
"""
_attribute_map = {
'compute_id': {'key': 'computeId', 'type': 'str'},
'component_id': {'key': 'componentId', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{JobInput}'},
'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
'overrides': {'key': 'overrides', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_id:
:paramtype compute_id: str
:keyword component_id:
:paramtype component_id: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.JobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.JobOutput]
:keyword overrides: Anything.
:paramtype overrides: any
"""
super(MfeInternalV20211001ComponentJob, self).__init__(**kwargs)
self.compute_id = kwargs.get('compute_id', None)
self.component_id = kwargs.get('component_id', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.overrides = kwargs.get('overrides', None)
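# Illustrative sketch: a component job with a hypothetical compute and component;
# ``inputs``/``outputs`` would map names to the JobInput/JobOutput models defined
# elsewhere in this module.
#
#   job = MfeInternalV20211001ComponentJob(
#       compute_id="cpu-cluster",
#       component_id="azureml:train:1",
#       overrides={"settings": {"force_rerun": True}},
#   )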
class MinMaxParameterRule(msrest.serialization.Model):
"""MinMaxParameterRule.
:ivar min:
:vartype min: float
:ivar max:
:vartype max: float
"""
_attribute_map = {
'min': {'key': 'min', 'type': 'float'},
'max': {'key': 'max', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword min:
:paramtype min: float
:keyword max:
:paramtype max: float
"""
super(MinMaxParameterRule, self).__init__(**kwargs)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
class MlcComputeInfo(msrest.serialization.Model):
"""MlcComputeInfo.
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
"""
_attribute_map = {
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
"""
super(MlcComputeInfo, self).__init__(**kwargs)
self.mlc_compute_type = kwargs.get('mlc_compute_type', None)
class ModelDto(msrest.serialization.Model):
"""ModelDto.
:ivar feed_name:
:vartype feed_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar id:
:vartype id: str
:ivar version:
:vartype version: str
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar arm_id:
:vartype arm_id: str
:ivar online_endpoint_yaml_str:
:vartype online_endpoint_yaml_str: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'arm_id': {'key': 'armId', 'type': 'str'},
'online_endpoint_yaml_str': {'key': 'onlineEndpointYamlStr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword id:
:paramtype id: str
:keyword version:
:paramtype version: str
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword arm_id:
:paramtype arm_id: str
:keyword online_endpoint_yaml_str:
:paramtype online_endpoint_yaml_str: str
"""
super(ModelDto, self).__init__(**kwargs)
self.feed_name = kwargs.get('feed_name', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.aml_data_store_name = kwargs.get('aml_data_store_name', None)
self.relative_path = kwargs.get('relative_path', None)
self.id = kwargs.get('id', None)
self.version = kwargs.get('version', None)
self.system_data = kwargs.get('system_data', None)
self.arm_id = kwargs.get('arm_id', None)
self.online_endpoint_yaml_str = kwargs.get('online_endpoint_yaml_str', None)
class ModelManagementErrorResponse(msrest.serialization.Model):
"""ModelManagementErrorResponse.
:ivar code:
:vartype code: str
:ivar status_code:
:vartype status_code: int
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
:ivar details:
:vartype details: list[~flow.models.InnerErrorDetails]
:ivar correlation: Dictionary of :code:`<string>`.
:vartype correlation: dict[str, str]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'status_code': {'key': 'statusCode', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[InnerErrorDetails]'},
'correlation': {'key': 'correlation', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword status_code:
:paramtype status_code: int
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
:keyword details:
:paramtype details: list[~flow.models.InnerErrorDetails]
:keyword correlation: Dictionary of :code:`<string>`.
:paramtype correlation: dict[str, str]
"""
super(ModelManagementErrorResponse, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.status_code = kwargs.get('status_code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.correlation = kwargs.get('correlation', None)
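# Illustrative sketch: a typical error payload shape, with hypothetical values.
#
#   err = ModelManagementErrorResponse(
#       code="NotFound",
#       status_code=404,
#       message="Model 'my-model' was not found.",
#       correlation={"RequestId": "00000000-0000-0000-0000-000000000000"},
#   )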
class ModifyPipelineJobScheduleDto(msrest.serialization.Model):
"""ModifyPipelineJobScheduleDto.
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(ModifyPipelineJobScheduleDto, self).__init__(**kwargs)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
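# Illustrative sketch: updating a schedule to a cron trigger. The pipeline job
# name and expression are hypothetical, and this assumes the Cron model (defined
# elsewhere in this module) accepts an ``expression`` keyword.
#
#   dto = ModifyPipelineJobScheduleDto(
#       pipeline_job_name="train-pipeline",
#       trigger_type="Cron",
#       cron=Cron(expression="0 9 * * 1"),
#       status="Enabled",
#   )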
class ModuleDto(msrest.serialization.Model):
"""ModuleDto.
:ivar namespace:
:vartype namespace: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar display_name:
:vartype display_name: str
:ivar dict_tags: Dictionary of :code:`<string>`.
:vartype dict_tags: dict[str, str]
:ivar module_version_id:
:vartype module_version_id: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
:ivar module_name:
:vartype module_name: str
:ivar module_version:
:vartype module_version: str
:ivar description:
:vartype description: str
:ivar owner:
:vartype owner: str
:ivar job_type:
:vartype job_type: str
:ivar default_version:
:vartype default_version: str
:ivar family_id:
:vartype family_id: str
:ivar help_document:
:vartype help_document: str
:ivar codegen_by:
:vartype codegen_by: str
:ivar arm_id:
:vartype arm_id: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar input_types:
:vartype input_types: list[str]
:ivar output_types:
:vartype output_types: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar yaml_link:
:vartype yaml_link: str
:ivar yaml_link_with_commit_sha:
:vartype yaml_link_with_commit_sha: str
:ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:vartype module_source_type: str or ~flow.models.ModuleSourceType
:ivar registered_by:
:vartype registered_by: str
:ivar versions:
:vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:ivar is_default_module_version:
:vartype is_default_module_version: bool
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar system_meta:
:vartype system_meta: ~flow.models.SystemMeta
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar entry:
:vartype entry: str
:ivar os_type:
:vartype os_type: str
:ivar require_gpu:
:vartype require_gpu: bool
:ivar module_python_interface:
:vartype module_python_interface: ~flow.models.ModulePythonInterface
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar run_setting_parameters:
:vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar supported_ui_input_data_delivery_modes: Dictionary mapping each input name to the
list of UI input data delivery modes it supports.
:vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:ivar yaml_str:
:vartype yaml_str: str
"""
_attribute_map = {
'namespace': {'key': 'namespace', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'dict_tags': {'key': 'dictTags', 'type': '{str}'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'},
'job_type': {'key': 'jobType', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'family_id': {'key': 'familyId', 'type': 'str'},
'help_document': {'key': 'helpDocument', 'type': 'str'},
'codegen_by': {'key': 'codegenBy', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'input_types': {'key': 'inputTypes', 'type': '[str]'},
'output_types': {'key': 'outputTypes', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'yaml_link': {'key': 'yamlLink', 'type': 'str'},
'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
'yaml_str': {'key': 'yamlStr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword namespace:
:paramtype namespace: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword display_name:
:paramtype display_name: str
:keyword dict_tags: Dictionary of :code:`<string>`.
:paramtype dict_tags: dict[str, str]
:keyword module_version_id:
:paramtype module_version_id: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
:keyword module_name:
:paramtype module_name: str
:keyword module_version:
:paramtype module_version: str
:keyword description:
:paramtype description: str
:keyword owner:
:paramtype owner: str
:keyword job_type:
:paramtype job_type: str
:keyword default_version:
:paramtype default_version: str
:keyword family_id:
:paramtype family_id: str
:keyword help_document:
:paramtype help_document: str
:keyword codegen_by:
:paramtype codegen_by: str
:keyword arm_id:
:paramtype arm_id: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword input_types:
:paramtype input_types: list[str]
:keyword output_types:
:paramtype output_types: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword yaml_link:
:paramtype yaml_link: str
:keyword yaml_link_with_commit_sha:
:paramtype yaml_link_with_commit_sha: str
:keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:paramtype module_source_type: str or ~flow.models.ModuleSourceType
:keyword registered_by:
:paramtype registered_by: str
:keyword versions:
:paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:keyword is_default_module_version:
:paramtype is_default_module_version: bool
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword system_meta:
:paramtype system_meta: ~flow.models.SystemMeta
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword entry:
:paramtype entry: str
:keyword os_type:
:paramtype os_type: str
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword module_python_interface:
:paramtype module_python_interface: ~flow.models.ModulePythonInterface
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword run_setting_parameters:
:paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword supported_ui_input_data_delivery_modes: Dictionary mapping each input name to
the list of UI input data delivery modes it supports.
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:keyword yaml_str:
:paramtype yaml_str: str
"""
super(ModuleDto, self).__init__(**kwargs)
self.namespace = kwargs.get('namespace', None)
self.tags = kwargs.get('tags', None)
self.display_name = kwargs.get('display_name', None)
self.dict_tags = kwargs.get('dict_tags', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.feed_name = kwargs.get('feed_name', None)
self.registry_name = kwargs.get('registry_name', None)
self.module_name = kwargs.get('module_name', None)
self.module_version = kwargs.get('module_version', None)
self.description = kwargs.get('description', None)
self.owner = kwargs.get('owner', None)
self.job_type = kwargs.get('job_type', None)
self.default_version = kwargs.get('default_version', None)
self.family_id = kwargs.get('family_id', None)
self.help_document = kwargs.get('help_document', None)
self.codegen_by = kwargs.get('codegen_by', None)
self.arm_id = kwargs.get('arm_id', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_entity = kwargs.get('module_entity', None)
self.input_types = kwargs.get('input_types', None)
self.output_types = kwargs.get('output_types', None)
self.entity_status = kwargs.get('entity_status', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.yaml_link = kwargs.get('yaml_link', None)
self.yaml_link_with_commit_sha = kwargs.get('yaml_link_with_commit_sha', None)
self.module_source_type = kwargs.get('module_source_type', None)
self.registered_by = kwargs.get('registered_by', None)
self.versions = kwargs.get('versions', None)
self.is_default_module_version = kwargs.get('is_default_module_version', None)
self.system_data = kwargs.get('system_data', None)
self.system_meta = kwargs.get('system_meta', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.entry = kwargs.get('entry', None)
self.os_type = kwargs.get('os_type', None)
self.require_gpu = kwargs.get('require_gpu', None)
self.module_python_interface = kwargs.get('module_python_interface', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.run_setting_parameters = kwargs.get('run_setting_parameters', None)
self.supported_ui_input_data_delivery_modes = kwargs.get('supported_ui_input_data_delivery_modes', None)
self.output_setting_specs = kwargs.get('output_setting_specs', None)
self.yaml_str = kwargs.get('yaml_str', None)
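# Illustrative sketch: ModuleDto is usually produced by deserializing service
# responses; a minimal hand-built instance (hypothetical values) looks like this.
#
#   dto = ModuleDto(
#       module_name="my_component",
#       module_version="0.0.1",
#       module_scope="Workspace",
#       entity_status="Active",
#   )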
class ModuleDtoWithErrors(msrest.serialization.Model):
"""ModuleDtoWithErrors.
:ivar version_id_to_module_dto: This is a dictionary.
:vartype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
:ivar name_and_version_to_module_dto:
:vartype name_and_version_to_module_dto:
list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
:ivar version_id_to_error: This is a dictionary.
:vartype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
:ivar name_and_version_to_error:
:vartype name_and_version_to_error:
list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
"""
_attribute_map = {
'version_id_to_module_dto': {'key': 'versionIdToModuleDto', 'type': '{ModuleDto}'},
'name_and_version_to_module_dto': {'key': 'nameAndVersionToModuleDto', 'type': '[KeyValuePairComponentNameMetaInfoModuleDto]'},
'version_id_to_error': {'key': 'versionIdToError', 'type': '{ErrorResponse}'},
'name_and_version_to_error': {'key': 'nameAndVersionToError', 'type': '[KeyValuePairComponentNameMetaInfoErrorResponse]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword version_id_to_module_dto: This is a dictionary.
:paramtype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
:keyword name_and_version_to_module_dto:
:paramtype name_and_version_to_module_dto:
list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
:keyword version_id_to_error: This is a dictionary.
:paramtype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
:keyword name_and_version_to_error:
:paramtype name_and_version_to_error:
list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
"""
super(ModuleDtoWithErrors, self).__init__(**kwargs)
self.version_id_to_module_dto = kwargs.get('version_id_to_module_dto', None)
self.name_and_version_to_module_dto = kwargs.get('name_and_version_to_module_dto', None)
self.version_id_to_error = kwargs.get('version_id_to_error', None)
self.name_and_version_to_error = kwargs.get('name_and_version_to_error', None)
class ModuleDtoWithValidateStatus(msrest.serialization.Model):
"""ModuleDtoWithValidateStatus.
:ivar existing_module_entity:
:vartype existing_module_entity: ~flow.models.ModuleEntity
:ivar status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
"ProcessRequestError".
:vartype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
:ivar status_details:
:vartype status_details: str
:ivar error_details:
:vartype error_details: list[str]
:ivar serialized_module_info:
:vartype serialized_module_info: str
:ivar namespace:
:vartype namespace: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar display_name:
:vartype display_name: str
:ivar dict_tags: Dictionary of :code:`<string>`.
:vartype dict_tags: dict[str, str]
:ivar module_version_id:
:vartype module_version_id: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
:ivar module_name:
:vartype module_name: str
:ivar module_version:
:vartype module_version: str
:ivar description:
:vartype description: str
:ivar owner:
:vartype owner: str
:ivar job_type:
:vartype job_type: str
:ivar default_version:
:vartype default_version: str
:ivar family_id:
:vartype family_id: str
:ivar help_document:
:vartype help_document: str
:ivar codegen_by:
:vartype codegen_by: str
:ivar arm_id:
:vartype arm_id: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar input_types:
:vartype input_types: list[str]
:ivar output_types:
:vartype output_types: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar yaml_link:
:vartype yaml_link: str
:ivar yaml_link_with_commit_sha:
:vartype yaml_link_with_commit_sha: str
:ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:vartype module_source_type: str or ~flow.models.ModuleSourceType
:ivar registered_by:
:vartype registered_by: str
:ivar versions:
:vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:ivar is_default_module_version:
:vartype is_default_module_version: bool
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar system_meta:
:vartype system_meta: ~flow.models.SystemMeta
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar entry:
:vartype entry: str
:ivar os_type:
:vartype os_type: str
:ivar require_gpu:
:vartype require_gpu: bool
:ivar module_python_interface:
:vartype module_python_interface: ~flow.models.ModulePythonInterface
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar run_setting_parameters:
:vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar supported_ui_input_data_delivery_modes: Dictionary mapping each input name to the
list of UI input data delivery modes it supports.
:vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:ivar yaml_str:
:vartype yaml_str: str
"""
_attribute_map = {
'existing_module_entity': {'key': 'existingModuleEntity', 'type': 'ModuleEntity'},
'status': {'key': 'status', 'type': 'str'},
'status_details': {'key': 'statusDetails', 'type': 'str'},
'error_details': {'key': 'errorDetails', 'type': '[str]'},
'serialized_module_info': {'key': 'serializedModuleInfo', 'type': 'str'},
'namespace': {'key': 'namespace', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'dict_tags': {'key': 'dictTags', 'type': '{str}'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'},
'job_type': {'key': 'jobType', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'family_id': {'key': 'familyId', 'type': 'str'},
'help_document': {'key': 'helpDocument', 'type': 'str'},
'codegen_by': {'key': 'codegenBy', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'input_types': {'key': 'inputTypes', 'type': '[str]'},
'output_types': {'key': 'outputTypes', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'yaml_link': {'key': 'yamlLink', 'type': 'str'},
'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
'yaml_str': {'key': 'yamlStr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword existing_module_entity:
:paramtype existing_module_entity: ~flow.models.ModuleEntity
:keyword status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
"ProcessRequestError".
:paramtype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
:keyword status_details:
:paramtype status_details: str
:keyword error_details:
:paramtype error_details: list[str]
:keyword serialized_module_info:
:paramtype serialized_module_info: str
:keyword namespace:
:paramtype namespace: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword display_name:
:paramtype display_name: str
:keyword dict_tags: Dictionary of :code:`<string>`.
:paramtype dict_tags: dict[str, str]
:keyword module_version_id:
:paramtype module_version_id: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
:keyword module_name:
:paramtype module_name: str
:keyword module_version:
:paramtype module_version: str
:keyword description:
:paramtype description: str
:keyword owner:
:paramtype owner: str
:keyword job_type:
:paramtype job_type: str
:keyword default_version:
:paramtype default_version: str
:keyword family_id:
:paramtype family_id: str
:keyword help_document:
:paramtype help_document: str
:keyword codegen_by:
:paramtype codegen_by: str
:keyword arm_id:
:paramtype arm_id: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword input_types:
:paramtype input_types: list[str]
:keyword output_types:
:paramtype output_types: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword yaml_link:
:paramtype yaml_link: str
:keyword yaml_link_with_commit_sha:
:paramtype yaml_link_with_commit_sha: str
:keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:paramtype module_source_type: str or ~flow.models.ModuleSourceType
:keyword registered_by:
:paramtype registered_by: str
:keyword versions:
:paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:keyword is_default_module_version:
:paramtype is_default_module_version: bool
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword system_meta:
:paramtype system_meta: ~flow.models.SystemMeta
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword entry:
:paramtype entry: str
:keyword os_type:
:paramtype os_type: str
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword module_python_interface:
:paramtype module_python_interface: ~flow.models.ModulePythonInterface
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword run_setting_parameters:
:paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword supported_ui_input_data_delivery_modes: Dictionary mapping each input name to
the list of UI input data delivery modes it supports.
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:keyword yaml_str:
:paramtype yaml_str: str
"""
super(ModuleDtoWithValidateStatus, self).__init__(**kwargs)
self.existing_module_entity = kwargs.get('existing_module_entity', None)
self.status = kwargs.get('status', None)
self.status_details = kwargs.get('status_details', None)
self.error_details = kwargs.get('error_details', None)
self.serialized_module_info = kwargs.get('serialized_module_info', None)
self.namespace = kwargs.get('namespace', None)
self.tags = kwargs.get('tags', None)
self.display_name = kwargs.get('display_name', None)
self.dict_tags = kwargs.get('dict_tags', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.feed_name = kwargs.get('feed_name', None)
self.registry_name = kwargs.get('registry_name', None)
self.module_name = kwargs.get('module_name', None)
self.module_version = kwargs.get('module_version', None)
self.description = kwargs.get('description', None)
self.owner = kwargs.get('owner', None)
self.job_type = kwargs.get('job_type', None)
self.default_version = kwargs.get('default_version', None)
self.family_id = kwargs.get('family_id', None)
self.help_document = kwargs.get('help_document', None)
self.codegen_by = kwargs.get('codegen_by', None)
self.arm_id = kwargs.get('arm_id', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_entity = kwargs.get('module_entity', None)
self.input_types = kwargs.get('input_types', None)
self.output_types = kwargs.get('output_types', None)
self.entity_status = kwargs.get('entity_status', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
self.yaml_link = kwargs.get('yaml_link', None)
self.yaml_link_with_commit_sha = kwargs.get('yaml_link_with_commit_sha', None)
self.module_source_type = kwargs.get('module_source_type', None)
self.registered_by = kwargs.get('registered_by', None)
self.versions = kwargs.get('versions', None)
self.is_default_module_version = kwargs.get('is_default_module_version', None)
self.system_data = kwargs.get('system_data', None)
self.system_meta = kwargs.get('system_meta', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.entry = kwargs.get('entry', None)
self.os_type = kwargs.get('os_type', None)
self.require_gpu = kwargs.get('require_gpu', None)
self.module_python_interface = kwargs.get('module_python_interface', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.run_setting_parameters = kwargs.get('run_setting_parameters', None)
self.supported_ui_input_data_delivery_modes = kwargs.get('supported_ui_input_data_delivery_modes', None)
self.output_setting_specs = kwargs.get('output_setting_specs', None)
self.yaml_str = kwargs.get('yaml_str', None)
class ModuleEntity(msrest.serialization.Model):
"""ModuleEntity.
:ivar display_name:
:vartype display_name: str
:ivar module_execution_type:
:vartype module_execution_type: str
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.ModuleType
:ivar module_type_version:
:vartype module_type_version: str
:ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:vartype upload_state: str or ~flow.models.UploadState
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar structured_interface:
:vartype structured_interface: ~flow.models.StructuredInterface
:ivar data_location:
:vartype data_location: ~flow.models.DataLocation
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar runconfig:
:vartype runconfig: str
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar category:
:vartype category: str
:ivar step_type:
:vartype step_type: str
:ivar stage:
:vartype stage: str
:ivar name:
:vartype name: str
:ivar hash:
:vartype hash: str
:ivar description:
:vartype description: str
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
'module_type': {'key': 'moduleType', 'type': 'str'},
'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
'upload_state': {'key': 'uploadState', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'structured_interface': {'key': 'structuredInterface', 'type': 'StructuredInterface'},
'data_location': {'key': 'dataLocation', 'type': 'DataLocation'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'category': {'key': 'category', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'stage': {'key': 'stage', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'hash': {'key': 'hash', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword module_execution_type:
:paramtype module_execution_type: str
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.ModuleType
:keyword module_type_version:
:paramtype module_type_version: str
:keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:paramtype upload_state: str or ~flow.models.UploadState
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword structured_interface:
:paramtype structured_interface: ~flow.models.StructuredInterface
:keyword data_location:
:paramtype data_location: ~flow.models.DataLocation
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword runconfig:
:paramtype runconfig: str
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword category:
:paramtype category: str
:keyword step_type:
:paramtype step_type: str
:keyword stage:
:paramtype stage: str
:keyword name:
:paramtype name: str
:keyword hash:
:paramtype hash: str
:keyword description:
:paramtype description: str
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ModuleEntity, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.module_execution_type = kwargs.get('module_execution_type', None)
self.module_type = kwargs.get('module_type', None)
self.module_type_version = kwargs.get('module_type_version', None)
self.upload_state = kwargs.get('upload_state', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.structured_interface = kwargs.get('structured_interface', None)
self.data_location = kwargs.get('data_location', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.created_by = kwargs.get('created_by', None)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.runconfig = kwargs.get('runconfig', None)
self.cloud_settings = kwargs.get('cloud_settings', None)
self.category = kwargs.get('category', None)
self.step_type = kwargs.get('step_type', None)
self.stage = kwargs.get('stage', None)
self.name = kwargs.get('name', None)
self.hash = kwargs.get('hash', None)
self.description = kwargs.get('description', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
class ModulePythonInterface(msrest.serialization.Model):
"""ModulePythonInterface.
:ivar inputs:
:vartype inputs: list[~flow.models.PythonInterfaceMapping]
:ivar outputs:
:vartype outputs: list[~flow.models.PythonInterfaceMapping]
:ivar parameters:
:vartype parameters: list[~flow.models.PythonInterfaceMapping]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '[PythonInterfaceMapping]'},
'outputs': {'key': 'outputs', 'type': '[PythonInterfaceMapping]'},
'parameters': {'key': 'parameters', 'type': '[PythonInterfaceMapping]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs:
:paramtype inputs: list[~flow.models.PythonInterfaceMapping]
:keyword outputs:
:paramtype outputs: list[~flow.models.PythonInterfaceMapping]
:keyword parameters:
:paramtype parameters: list[~flow.models.PythonInterfaceMapping]
"""
super(ModulePythonInterface, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.parameters = kwargs.get('parameters', None)
class MpiConfiguration(msrest.serialization.Model):
"""MpiConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(MpiConfiguration, self).__init__(**kwargs)
self.process_count_per_node = kwargs.get('process_count_per_node', None)
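# Illustrative sketch: requesting four MPI processes per node.
#
#   mpi = MpiConfiguration(process_count_per_node=4)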
class NCrossValidations(msrest.serialization.Model):
"""NCrossValidations.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.NCrossValidationMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.NCrossValidationMode
:keyword value:
:paramtype value: int
"""
super(NCrossValidations, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
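# Illustrative sketch: "Auto" lets the service choose the fold count, while
# "Custom" pairs the mode with an explicit value.
#
#   auto_cv = NCrossValidations(mode="Auto")
#   custom_cv = NCrossValidations(mode="Custom", value=5)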
class Node(msrest.serialization.Model):
"""Node.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:vartype type: str or ~flow.models.ToolType
:ivar source:
:vartype source: ~flow.models.NodeSource
:ivar inputs: Dictionary of :code:`<any>`.
:vartype inputs: dict[str, any]
:ivar tool:
:vartype tool: str
:ivar reduce:
:vartype reduce: bool
:ivar activate:
:vartype activate: ~flow.models.Activate
:ivar comment:
:vartype comment: str
:ivar api:
:vartype api: str
:ivar provider:
:vartype provider: str
:ivar connection:
:vartype connection: str
:ivar module:
:vartype module: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'NodeSource'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'tool': {'key': 'tool', 'type': 'str'},
'reduce': {'key': 'reduce', 'type': 'bool'},
'activate': {'key': 'activate', 'type': 'Activate'},
'comment': {'key': 'comment', 'type': 'str'},
'api': {'key': 'api', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'connection': {'key': 'connection', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:paramtype type: str or ~flow.models.ToolType
:keyword source:
:paramtype source: ~flow.models.NodeSource
:keyword inputs: Dictionary of :code:`<any>`.
:paramtype inputs: dict[str, any]
:keyword tool:
:paramtype tool: str
:keyword reduce:
:paramtype reduce: bool
:keyword activate:
:paramtype activate: ~flow.models.Activate
:keyword comment:
:paramtype comment: str
:keyword api:
:paramtype api: str
:keyword provider:
:paramtype provider: str
:keyword connection:
:paramtype connection: str
:keyword module:
:paramtype module: str
"""
super(Node, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.source = kwargs.get('source', None)
self.inputs = kwargs.get('inputs', None)
self.tool = kwargs.get('tool', None)
self.reduce = kwargs.get('reduce', None)
self.activate = kwargs.get('activate', None)
self.comment = kwargs.get('comment', None)
self.api = kwargs.get('api', None)
self.provider = kwargs.get('provider', None)
self.connection = kwargs.get('connection', None)
self.module = kwargs.get('module', None)
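# Illustrative sketch: a minimal python tool node as it might appear in a flow
# DAG. ``NodeSource`` is defined later in this module; the file name, input
# binding, and source type are hypothetical.
#
#   node = Node(
#       name="summarize",
#       type="python",
#       source=NodeSource(type="code", path="summarize.py"),
#       inputs={"text": "${inputs.question}"},
#   )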
class NodeInputPort(msrest.serialization.Model):
"""NodeInputPort.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar data_types_ids:
:vartype data_types_ids: list[str]
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'data_types_ids': {'key': 'dataTypesIds', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword data_types_ids:
:paramtype data_types_ids: list[str]
:keyword is_optional:
:paramtype is_optional: bool
"""
super(NodeInputPort, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.data_types_ids = kwargs.get('data_types_ids', None)
self.is_optional = kwargs.get('is_optional', None)
class NodeLayout(msrest.serialization.Model):
"""NodeLayout.
:ivar x:
:vartype x: float
:ivar y:
:vartype y: float
:ivar width:
:vartype width: float
:ivar height:
:vartype height: float
:ivar extended_data:
:vartype extended_data: str
"""
_attribute_map = {
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
'width': {'key': 'width', 'type': 'float'},
'height': {'key': 'height', 'type': 'float'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword x:
:paramtype x: float
:keyword y:
:paramtype y: float
:keyword width:
:paramtype width: float
:keyword height:
:paramtype height: float
:keyword extended_data:
:paramtype extended_data: str
"""
super(NodeLayout, self).__init__(**kwargs)
self.x = kwargs.get('x', None)
self.y = kwargs.get('y', None)
self.width = kwargs.get('width', None)
self.height = kwargs.get('height', None)
self.extended_data = kwargs.get('extended_data', None)
class NodeOutputPort(msrest.serialization.Model):
"""NodeOutputPort.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_input_name:
:vartype pass_through_input_name: str
:ivar early_available:
:vartype early_available: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_input_name': {'key': 'passThroughInputName', 'type': 'str'},
'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},  # note: Pascal-case wire key, unlike the camelCase keys above
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_input_name:
:paramtype pass_through_input_name: str
:keyword early_available:
:paramtype early_available: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
"""
super(NodeOutputPort, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.pass_through_input_name = kwargs.get('pass_through_input_name', None)
self.early_available = kwargs.get('early_available', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
class NodePortInterface(msrest.serialization.Model):
"""NodePortInterface.
:ivar inputs:
:vartype inputs: list[~flow.models.NodeInputPort]
:ivar outputs:
:vartype outputs: list[~flow.models.NodeOutputPort]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.ControlOutput]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '[NodeInputPort]'},
'outputs': {'key': 'outputs', 'type': '[NodeOutputPort]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword inputs:
:paramtype inputs: list[~flow.models.NodeInputPort]
:keyword outputs:
:paramtype outputs: list[~flow.models.NodeOutputPort]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.ControlOutput]
"""
super(NodePortInterface, self).__init__(**kwargs)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.control_outputs = kwargs.get('control_outputs', None)
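# Illustrative sketch: composing a ``NodePortInterface`` from the port models
# defined above. The port names and data type ids are invented for
# demonstration.
#
#   interface = NodePortInterface(
#       inputs=[NodeInputPort(name="training_data", data_types_ids=["AnyFile"], is_optional=False)],
#       outputs=[NodeOutputPort(name="model", data_type_id="ModelDirectory")],
#   )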
class Nodes(msrest.serialization.Model):
"""Nodes.
All required parameters must be populated in order to send to Azure.
:ivar nodes_value_type: Required. Possible values include: "All", "Custom".
:vartype nodes_value_type: str or ~flow.models.NodesValueType
:ivar values:
:vartype values: list[int]
"""
_validation = {
'nodes_value_type': {'required': True},
}
_attribute_map = {
'nodes_value_type': {'key': 'nodes_value_type', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword nodes_value_type: Required. Possible values include: "All", "Custom".
:paramtype nodes_value_type: str or ~flow.models.NodesValueType
:keyword values:
:paramtype values: list[int]
"""
super(Nodes, self).__init__(**kwargs)
self.nodes_value_type = kwargs['nodes_value_type']
self.values = kwargs.get('values', None)
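# Usage note (illustrative): ``nodes_value_type`` is required, so ``__init__``
# reads it with ``kwargs['nodes_value_type']`` and omitting it raises
# ``KeyError``. ``values`` is presumably only meaningful for the "Custom"
# selection.
#
#   all_nodes = Nodes(nodes_value_type="All")
#   some_nodes = Nodes(nodes_value_type="Custom", values=[0, 2, 5])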
class NodeSource(msrest.serialization.Model):
"""NodeSource.
:ivar type:
:vartype type: str
:ivar tool:
:vartype tool: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'tool': {'key': 'tool', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword tool:
:paramtype tool: str
:keyword path:
:paramtype path: str
"""
super(NodeSource, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.tool = kwargs.get('tool', None)
self.path = kwargs.get('path', None)
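# Illustrative sketch: a ``NodeSource`` pointing a node at a local script. The
# ``type`` and ``path`` values below are assumptions for demonstration, not
# values confirmed by the service contract.
#
#   source = NodeSource(type="code", path="hello_world.py")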
class NodeTelemetryMetaInfo(msrest.serialization.Model):
"""NodeTelemetryMetaInfo.
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar node_id:
:vartype node_id: str
:ivar version_id:
:vartype version_id: str
:ivar node_type:
:vartype node_type: str
:ivar node_source:
:vartype node_source: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar is_pipeline_component:
:vartype is_pipeline_component: bool
"""
_attribute_map = {
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'version_id': {'key': 'versionId', 'type': 'str'},
'node_type': {'key': 'nodeType', 'type': 'str'},
'node_source': {'key': 'nodeSource', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'is_pipeline_component': {'key': 'isPipelineComponent', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword version_id:
:paramtype version_id: str
:keyword node_type:
:paramtype node_type: str
:keyword node_source:
:paramtype node_source: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword is_pipeline_component:
:paramtype is_pipeline_component: bool
"""
super(NodeTelemetryMetaInfo, self).__init__(**kwargs)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.node_id = kwargs.get('node_id', None)
self.version_id = kwargs.get('version_id', None)
self.node_type = kwargs.get('node_type', None)
self.node_source = kwargs.get('node_source', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
self.is_pipeline_component = kwargs.get('is_pipeline_component', None)
class NodeVariant(msrest.serialization.Model):
"""NodeVariant.
:ivar variants: This is a dictionary.
:vartype variants: dict[str, ~flow.models.VariantNode]
:ivar default_variant_id:
:vartype default_variant_id: str
"""
_attribute_map = {
'variants': {'key': 'variants', 'type': '{VariantNode}'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, ~flow.models.VariantNode]
:keyword default_variant_id:
:paramtype default_variant_id: str
"""
super(NodeVariant, self).__init__(**kwargs)
self.variants = kwargs.get('variants', None)
self.default_variant_id = kwargs.get('default_variant_id', None)
class NoteBookTaskDto(msrest.serialization.Model):
"""NoteBookTaskDto.
:ivar notebook_path:
:vartype notebook_path: str
:ivar base_parameters: Dictionary of :code:`<string>`.
:vartype base_parameters: dict[str, str]
"""
_attribute_map = {
'notebook_path': {'key': 'notebook_path', 'type': 'str'},
'base_parameters': {'key': 'base_parameters', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword notebook_path:
:paramtype notebook_path: str
:keyword base_parameters: Dictionary of :code:`<string>`.
:paramtype base_parameters: dict[str, str]
"""
super(NoteBookTaskDto, self).__init__(**kwargs)
self.notebook_path = kwargs.get('notebook_path', None)
self.base_parameters = kwargs.get('base_parameters', None)
class NotificationSetting(msrest.serialization.Model):
"""NotificationSetting.
:ivar emails:
:vartype emails: list[str]
:ivar email_on:
:vartype email_on: list[str or ~flow.models.EmailNotificationEnableType]
:ivar webhooks: Dictionary of :code:`<Webhook>`.
:vartype webhooks: dict[str, ~flow.models.Webhook]
"""
_attribute_map = {
'emails': {'key': 'emails', 'type': '[str]'},
'email_on': {'key': 'emailOn', 'type': '[str]'},
'webhooks': {'key': 'webhooks', 'type': '{Webhook}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword emails:
:paramtype emails: list[str]
:keyword email_on:
:paramtype email_on: list[str or ~flow.models.EmailNotificationEnableType]
:keyword webhooks: Dictionary of :code:`<Webhook>`.
:paramtype webhooks: dict[str, ~flow.models.Webhook]
"""
super(NotificationSetting, self).__init__(**kwargs)
self.emails = kwargs.get('emails', None)
self.email_on = kwargs.get('email_on', None)
self.webhooks = kwargs.get('webhooks', None)
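# Illustrative sketch: email notifications for a couple of run states. The
# ``email_on`` strings are assumptions about ``EmailNotificationEnableType``
# (not reproduced here), not confirmed values.
#
#   notifications = NotificationSetting(
#       emails=["[email protected]"],
#       email_on=["JobCompleted", "JobFailed"],
#   )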
class ODataError(msrest.serialization.Model):
"""Represents OData v4 error object.
:ivar code: Gets or sets a language-independent, service-defined error code.
This code serves as a sub-status for the HTTP error code specified
in the response.
:vartype code: str
:ivar message: Gets or sets a human-readable, language-dependent representation of the error.
The ``Content-Language`` header MUST contain the language code from [RFC5646]
corresponding to the language in which the value for message is written.
:vartype message: str
:ivar target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:vartype target: str
:ivar details: Gets or sets additional details about the error.
:vartype details: list[~flow.models.ODataErrorDetail]
:ivar innererror: The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:vartype innererror: ~flow.models.ODataInnerError
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[ODataErrorDetail]'},
'innererror': {'key': 'innererror', 'type': 'ODataInnerError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: Gets or sets a language-independent, service-defined error code.
This code serves as a sub-status for the HTTP error code specified
in the response.
:paramtype code: str
:keyword message: Gets or sets a human-readable, language-dependent representation of the
error.
The ``Content-Language`` header MUST contain the language code from [RFC5646]
corresponding to the language in which the value for message is written.
:paramtype message: str
:keyword target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:paramtype target: str
:keyword details: Gets or sets additional details about the error.
:paramtype details: list[~flow.models.ODataErrorDetail]
:keyword innererror: The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:paramtype innererror: ~flow.models.ODataInnerError
"""
super(ODataError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.innererror = kwargs.get('innererror', None)
class ODataErrorDetail(msrest.serialization.Model):
"""Represents additional error details.
:ivar code: Gets or sets a language-independent, service-defined error code.
:vartype code: str
:ivar message: Gets or sets a human-readable, language-dependent representation of the error.
:vartype message: str
:ivar target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:vartype target: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: Gets or sets a language-independent, service-defined error code.
:paramtype code: str
:keyword message: Gets or sets a human-readable, language-dependent representation of the
error.
:paramtype message: str
:keyword target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:paramtype target: str
"""
super(ODataErrorDetail, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
class ODataErrorResponse(msrest.serialization.Model):
"""Represents OData v4 compliant error response message.
:ivar error: Represents OData v4 error object.
:vartype error: ~flow.models.ODataError
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ODataError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error: Represents an OData v4 error object.
:paramtype error: ~flow.models.ODataError
"""
super(ODataErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
class ODataInnerError(msrest.serialization.Model):
"""The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:ivar client_request_id: Gets or sets the client provided request ID.
:vartype client_request_id: str
:ivar service_request_id: Gets or sets the server generated request ID.
:vartype service_request_id: str
:ivar trace: Gets or sets the exception stack trace.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT.
:vartype trace: str
:ivar context: Gets or sets additional context for the exception.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT.
:vartype context: str
"""
_attribute_map = {
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'service_request_id': {'key': 'serviceRequestId', 'type': 'str'},
'trace': {'key': 'trace', 'type': 'str'},
'context': {'key': 'context', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword client_request_id: Gets or sets the client provided request ID.
:paramtype client_request_id: str
:keyword service_request_id: Gets or sets the server generated request ID.
:paramtype service_request_id: str
:keyword trace: Gets or sets the exception stack trace.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT.
:paramtype trace: str
:keyword context: Gets or sets additional context for the exception.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENT.
:paramtype context: str
"""
super(ODataInnerError, self).__init__(**kwargs)
self.client_request_id = kwargs.get('client_request_id', None)
self.service_request_id = kwargs.get('service_request_id', None)
self.trace = kwargs.get('trace', None)
self.context = kwargs.get('context', None)
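# Illustrative sketch: assembling a complete OData v4 error envelope from the
# models above. Every literal code and message is invented for demonstration.
#
#   error_response = ODataErrorResponse(
#       error=ODataError(
#           code="ValidationFailed",
#           message="The request payload failed validation.",
#           target="flowDefinitionFilePath",
#           details=[ODataErrorDetail(code="Required", message="A value is required.")],
#           innererror=ODataInnerError(client_request_id="client-request-id-placeholder"),
#       )
#   )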
class OutputData(msrest.serialization.Model):
"""OutputData.
:ivar output_location:
:vartype output_location: ~flow.models.ExecutionDataLocation
:ivar mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct".
:vartype mechanism: str or ~flow.models.OutputMechanism
:ivar additional_options:
:vartype additional_options: ~flow.models.OutputOptions
:ivar environment_variable_name:
:vartype environment_variable_name: str
"""
_attribute_map = {
'output_location': {'key': 'outputLocation', 'type': 'ExecutionDataLocation'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'additional_options': {'key': 'additionalOptions', 'type': 'OutputOptions'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword output_location:
:paramtype output_location: ~flow.models.ExecutionDataLocation
:keyword mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct".
:paramtype mechanism: str or ~flow.models.OutputMechanism
:keyword additional_options:
:paramtype additional_options: ~flow.models.OutputOptions
:keyword environment_variable_name:
:paramtype environment_variable_name: str
"""
super(OutputData, self).__init__(**kwargs)
self.output_location = kwargs.get('output_location', None)
self.mechanism = kwargs.get('mechanism', None)
self.additional_options = kwargs.get('additional_options', None)
self.environment_variable_name = kwargs.get('environment_variable_name', None)
class OutputDataBinding(msrest.serialization.Model):
"""OutputDataBinding.
:ivar datastore_id:
:vartype datastore_id: str
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar description:
:vartype description: str
:ivar uri:
:vartype uri: ~flow.models.MfeInternalUriReference
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar asset_uri:
:vartype asset_uri: str
:ivar is_asset_job_output:
:vartype is_asset_job_output: bool
:ivar job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
"MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_output_type: str or ~flow.models.JobOutputType
:ivar asset_name:
:vartype asset_name: str
:ivar asset_version:
:vartype asset_version: str
:ivar auto_delete_setting:
:vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
_attribute_map = {
'datastore_id': {'key': 'datastoreId', 'type': 'str'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
'mode': {'key': 'mode', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'is_asset_job_output': {'key': 'isAssetJobOutput', 'type': 'bool'},
'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
'asset_version': {'key': 'assetVersion', 'type': 'str'},
'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
}
def __init__(
self,
**kwargs
):
"""
:keyword datastore_id:
:paramtype datastore_id: str
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword description:
:paramtype description: str
:keyword uri:
:paramtype uri: ~flow.models.MfeInternalUriReference
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword asset_uri:
:paramtype asset_uri: str
:keyword is_asset_job_output:
:paramtype is_asset_job_output: bool
:keyword job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
"MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_output_type: str or ~flow.models.JobOutputType
:keyword asset_name:
:paramtype asset_name: str
:keyword asset_version:
:paramtype asset_version: str
:keyword auto_delete_setting:
:paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
super(OutputDataBinding, self).__init__(**kwargs)
self.datastore_id = kwargs.get('datastore_id', None)
self.path_on_datastore = kwargs.get('path_on_datastore', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.description = kwargs.get('description', None)
self.uri = kwargs.get('uri', None)
self.mode = kwargs.get('mode', None)
self.asset_uri = kwargs.get('asset_uri', None)
self.is_asset_job_output = kwargs.get('is_asset_job_output', None)
self.job_output_type = kwargs.get('job_output_type', None)
self.asset_name = kwargs.get('asset_name', None)
self.asset_version = kwargs.get('asset_version', None)
self.auto_delete_setting = kwargs.get('auto_delete_setting', None)
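# Illustrative sketch: an asset-style output binding. The ``mode`` and
# ``job_output_type`` strings come from the enums documented above; the
# datastore id and path are invented.
#
#   binding = OutputDataBinding(
#       datastore_id="workspaceblobstore",
#       path_on_datastore="outputs/model",
#       mode="ReadWriteMount",
#       job_output_type="UriFolder",
#   )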
class OutputDatasetLineage(msrest.serialization.Model):
"""OutputDatasetLineage.
:ivar identifier:
:vartype identifier: ~flow.models.DatasetIdentifier
:ivar output_type: Possible values include: "RunOutput", "Reference".
:vartype output_type: str or ~flow.models.DatasetOutputType
:ivar output_details:
:vartype output_details: ~flow.models.DatasetOutputDetails
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
'output_type': {'key': 'outputType', 'type': 'str'},
'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier:
:paramtype identifier: ~flow.models.DatasetIdentifier
:keyword output_type: Possible values include: "RunOutput", "Reference".
:paramtype output_type: str or ~flow.models.DatasetOutputType
:keyword output_details:
:paramtype output_details: ~flow.models.DatasetOutputDetails
"""
super(OutputDatasetLineage, self).__init__(**kwargs)
self.identifier = kwargs.get('identifier', None)
self.output_type = kwargs.get('output_type', None)
self.output_details = kwargs.get('output_details', None)
class OutputDefinition(msrest.serialization.Model):
"""OutputDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar description:
:vartype description: str
:ivar is_property:
:vartype is_property: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': '[str]'},
'description': {'key': 'description', 'type': 'str'},
'is_property': {'key': 'isProperty', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword description:
:paramtype description: str
:keyword is_property:
:paramtype is_property: bool
"""
super(OutputDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.description = kwargs.get('description', None)
self.is_property = kwargs.get('is_property', None)
class OutputOptions(msrest.serialization.Model):
"""OutputOptions.
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar registration_options:
:vartype registration_options: ~flow.models.RegistrationOptions
:ivar upload_options:
:vartype upload_options: ~flow.models.UploadOptions
:ivar mount_options: Dictionary of :code:`<string>`.
:vartype mount_options: dict[str, str]
"""
_attribute_map = {
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'registration_options': {'key': 'registrationOptions', 'type': 'RegistrationOptions'},
'upload_options': {'key': 'uploadOptions', 'type': 'UploadOptions'},
'mount_options': {'key': 'mountOptions', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword registration_options:
:paramtype registration_options: ~flow.models.RegistrationOptions
:keyword upload_options:
:paramtype upload_options: ~flow.models.UploadOptions
:keyword mount_options: Dictionary of :code:`<string>`.
:paramtype mount_options: dict[str, str]
"""
super(OutputOptions, self).__init__(**kwargs)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.registration_options = kwargs.get('registration_options', None)
self.upload_options = kwargs.get('upload_options', None)
self.mount_options = kwargs.get('mount_options', None)
class OutputSetting(msrest.serialization.Model):
"""OutputSetting.
:ivar name:
:vartype name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_name_parameter_assignment:
:vartype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar data_store_mode_parameter_assignment:
:vartype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar path_on_compute_parameter_assignment:
:vartype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar web_service_port:
:vartype web_service_port: str
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.DatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.DatasetOutputOptions
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AssetOutputSettings
:ivar parameter_name:
:vartype parameter_name: str
:ivar asset_output_settings_parameter_name:
:vartype asset_output_settings_parameter_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'ParameterAssignment'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'ParameterAssignment'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'ParameterAssignment'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_name_parameter_assignment:
:paramtype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword data_store_mode_parameter_assignment:
:paramtype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword path_on_compute_parameter_assignment:
:paramtype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword web_service_port:
:paramtype web_service_port: str
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.DatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AssetOutputSettings
:keyword parameter_name:
:paramtype parameter_name: str
:keyword asset_output_settings_parameter_name:
:paramtype asset_output_settings_parameter_name: str
"""
super(OutputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_name_parameter_assignment = kwargs.get('data_store_name_parameter_assignment', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.data_store_mode_parameter_assignment = kwargs.get('data_store_mode_parameter_assignment', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.path_on_compute_parameter_assignment = kwargs.get('path_on_compute_parameter_assignment', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.web_service_port = kwargs.get('web_service_port', None)
self.dataset_registration = kwargs.get('dataset_registration', None)
self.dataset_output_options = kwargs.get('dataset_output_options', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.asset_output_settings_parameter_name = kwargs.get('asset_output_settings_parameter_name', None)
class OutputSettingSpec(msrest.serialization.Model):
"""OutputSettingSpec.
:ivar supported_data_store_modes:
:vartype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
:ivar default_asset_output_path:
:vartype default_asset_output_path: str
"""
_attribute_map = {
'supported_data_store_modes': {'key': 'supportedDataStoreModes', 'type': '[str]'},
'default_asset_output_path': {'key': 'defaultAssetOutputPath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword supported_data_store_modes:
:paramtype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
:keyword default_asset_output_path:
:paramtype default_asset_output_path: str
"""
super(OutputSettingSpec, self).__init__(**kwargs)
self.supported_data_store_modes = kwargs.get('supported_data_store_modes', None)
self.default_asset_output_path = kwargs.get('default_asset_output_path', None)
class PaginatedDataInfoList(msrest.serialization.Model):
"""A paginated list of DataInfos.
:ivar value: An array of objects of type DataInfo.
:vartype value: list[~flow.models.DataInfo]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[DataInfo]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type DataInfo.
:paramtype value: list[~flow.models.DataInfo]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedDataInfoList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
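# Illustrative pagination sketch: draining a paginated list by following
# ``continuation_token`` until the service stops returning one. ``fetch_page``
# is a hypothetical callable standing in for whichever operation returns a
# ``PaginatedDataInfoList``.
#
#   def drain_data_infos(fetch_page):
#       items, token = [], None
#       while True:
#           page = fetch_page(continuation_token=token)
#           items.extend(page.value or [])
#           token = page.continuation_token
#           if not token:
#               return items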
class PaginatedModelDtoList(msrest.serialization.Model):
"""A paginated list of ModelDtos.
:ivar value: An array of objects of type ModelDto.
:vartype value: list[~flow.models.ModelDto]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ModelDto]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type ModelDto.
:paramtype value: list[~flow.models.ModelDto]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedModelDtoList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedModuleDtoList(msrest.serialization.Model):
"""A paginated list of ModuleDtos.
:ivar value: An array of objects of type ModuleDto.
:vartype value: list[~flow.models.ModuleDto]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ModuleDto]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type ModuleDto.
:paramtype value: list[~flow.models.ModuleDto]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedModuleDtoList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPipelineDraftSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineDraftSummarys.
:ivar value: An array of objects of type PipelineDraftSummary.
:vartype value: list[~flow.models.PipelineDraftSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineDraftSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineDraftSummary.
:paramtype value: list[~flow.models.PipelineDraftSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineDraftSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPipelineEndpointSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineEndpointSummarys.
:ivar value: An array of objects of type PipelineEndpointSummary.
:vartype value: list[~flow.models.PipelineEndpointSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineEndpointSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineEndpointSummary.
:paramtype value: list[~flow.models.PipelineEndpointSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineEndpointSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPipelineRunSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineRunSummarys.
:ivar value: An array of objects of type PipelineRunSummary.
:vartype value: list[~flow.models.PipelineRunSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineRunSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineRunSummary.
:paramtype value: list[~flow.models.PipelineRunSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineRunSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class PaginatedPublishedPipelineSummaryList(msrest.serialization.Model):
"""A paginated list of PublishedPipelineSummarys.
:ivar value: An array of objects of type PublishedPipelineSummary.
:vartype value: list[~flow.models.PublishedPipelineSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublishedPipelineSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value: An array of objects of type PublishedPipelineSummary.
:paramtype value: list[~flow.models.PublishedPipelineSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPublishedPipelineSummaryList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.next_link = kwargs.get('next_link', None)
class ParallelForControlFlowInfo(msrest.serialization.Model):
"""ParallelForControlFlowInfo.
:ivar parallel_for_items_input:
:vartype parallel_for_items_input: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'ParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parallel_for_items_input:
:paramtype parallel_for_items_input: ~flow.models.ParameterAssignment
"""
super(ParallelForControlFlowInfo, self).__init__(**kwargs)
self.parallel_for_items_input = kwargs.get('parallel_for_items_input', None)
class ParallelTaskConfiguration(msrest.serialization.Model):
"""ParallelTaskConfiguration.
:ivar max_retries_per_worker:
:vartype max_retries_per_worker: int
:ivar worker_count_per_node:
:vartype worker_count_per_node: int
:ivar terminal_exit_codes:
:vartype terminal_exit_codes: list[int]
:ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str]
"""
_attribute_map = {
'max_retries_per_worker': {'key': 'maxRetriesPerWorker', 'type': 'int'},
'worker_count_per_node': {'key': 'workerCountPerNode', 'type': 'int'},
'terminal_exit_codes': {'key': 'terminalExitCodes', 'type': '[int]'},
'configuration': {'key': 'configuration', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retries_per_worker:
:paramtype max_retries_per_worker: int
:keyword worker_count_per_node:
:paramtype worker_count_per_node: int
:keyword terminal_exit_codes:
:paramtype terminal_exit_codes: list[int]
:keyword configuration: Dictionary of :code:`<string>`.
:paramtype configuration: dict[str, str]
"""
super(ParallelTaskConfiguration, self).__init__(**kwargs)
self.max_retries_per_worker = kwargs.get('max_retries_per_worker', None)
self.worker_count_per_node = kwargs.get('worker_count_per_node', None)
self.terminal_exit_codes = kwargs.get('terminal_exit_codes', None)
self.configuration = kwargs.get('configuration', None)
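# Illustrative sketch: a parallel task allowing three retries per worker with
# two workers per node; treating exit code 42 as terminal is an invented
# example.
#
#   parallel_config = ParallelTaskConfiguration(
#       max_retries_per_worker=3,
#       worker_count_per_node=2,
#       terminal_exit_codes=[42],
#   )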
class Parameter(msrest.serialization.Model):
"""Parameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: str
:ivar is_optional:
:vartype is_optional: bool
:ivar min_max_rules:
:vartype min_max_rules: list[~flow.models.MinMaxParameterRule]
:ivar enum_rules:
:vartype enum_rules: list[~flow.models.EnumParameterRule]
:ivar type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype type: str or ~flow.models.ParameterType
:ivar label:
:vartype label: str
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
:ivar ui_hint:
:vartype ui_hint: ~flow.models.UIParameterHint
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'min_max_rules': {'key': 'minMaxRules', 'type': '[MinMaxParameterRule]'},
'enum_rules': {'key': 'enumRules', 'type': '[EnumParameterRule]'},
'type': {'key': 'type', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: str
:keyword is_optional:
:paramtype is_optional: bool
:keyword min_max_rules:
:paramtype min_max_rules: list[~flow.models.MinMaxParameterRule]
:keyword enum_rules:
:paramtype enum_rules: list[~flow.models.EnumParameterRule]
:keyword type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:paramtype type: str or ~flow.models.ParameterType
:keyword label:
:paramtype label: str
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.UIParameterHint
"""
super(Parameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.documentation = kwargs.get('documentation', None)
self.default_value = kwargs.get('default_value', None)
self.is_optional = kwargs.get('is_optional', None)
self.min_max_rules = kwargs.get('min_max_rules', None)
self.enum_rules = kwargs.get('enum_rules', None)
self.type = kwargs.get('type', None)
self.label = kwargs.get('label', None)
self.group_names = kwargs.get('group_names', None)
self.argument_name = kwargs.get('argument_name', None)
self.ui_hint = kwargs.get('ui_hint', None)
class ParameterAssignment(msrest.serialization.Model):
"""ParameterAssignment.
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.ParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.LegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.ParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.LegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(ParameterAssignment, self).__init__(**kwargs)
self.value_type = kwargs.get('value_type', None)
self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
self.data_path_assignment = kwargs.get('data_path_assignment', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
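# Illustrative sketch: ``ParameterAssignment`` is recursive, so a
# "Concatenate" assignment can stitch literal fragments together with graph
# parameters. The parameter names below are invented; this shows the shape,
# not confirmed service semantics.
#
#   output_path = ParameterAssignment(
#       value_type="Concatenate",
#       assignments_to_concatenate=[
#           ParameterAssignment(value_type="Literal", value="outputs/"),
#           ParameterAssignment(value_type="GraphParameterName", value="run_name"),
#       ],
#   )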
class ParameterDefinition(msrest.serialization.Model):
"""ParameterDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar value:
:vartype value: str
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
:keyword value:
:paramtype value: str
:keyword is_optional:
:paramtype is_optional: bool
"""
super(ParameterDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.value = kwargs.get('value', None)
self.is_optional = kwargs.get('is_optional', None)
class PatchFlowRequest(msrest.serialization.Model):
"""PatchFlowRequest.
:ivar flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
"ExportFlowToFile".
:vartype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
"""
_attribute_map = {
'flow_patch_operation_type': {'key': 'flowPatchOperationType', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
"ExportFlowToFile".
:paramtype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
"""
super(PatchFlowRequest, self).__init__(**kwargs)
self.flow_patch_operation_type = kwargs.get('flow_patch_operation_type', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
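# Illustrative sketch: patch payloads for archiving a flow and for exporting
# its definition. The operation names come from the enum documented above; the
# file path is invented.
#
#   archive_request = PatchFlowRequest(flow_patch_operation_type="ArchiveFlow")
#   export_request = PatchFlowRequest(
#       flow_patch_operation_type="ExportFlowToFile",
#       flow_definition_file_path="flows/my_flow/flow.dag.yaml",
#   )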
class Pipeline(msrest.serialization.Model):
"""Pipeline.
:ivar run_id:
:vartype run_id: str
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar default_datastore_name:
:vartype default_datastore_name: str
:ivar component_jobs: This is a dictionary.
:vartype component_jobs: dict[str, ~flow.models.ComponentJob]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.PipelineInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.PipelineOutput]
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword default_datastore_name:
:paramtype default_datastore_name: str
:keyword component_jobs: This is a dictionary.
:paramtype component_jobs: dict[str, ~flow.models.ComponentJob]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.PipelineInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.PipelineOutput]
"""
super(Pipeline, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.default_datastore_name = kwargs.get('default_datastore_name', None)
self.component_jobs = kwargs.get('component_jobs', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
class PipelineDraft(msrest.serialization.Model):
"""PipelineDraft.
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar source_pipeline_run_id:
:vartype source_pipeline_run_id: str
:ivar latest_pipeline_run_id:
:vartype latest_pipeline_run_id: str
:ivar latest_run_experiment_name:
:vartype latest_run_experiment_name: str
:ivar latest_run_experiment_id:
:vartype latest_run_experiment_id: str
:ivar is_latest_run_experiment_archived:
:vartype is_latest_run_experiment_archived: bool
:ivar status:
:vartype status: ~flow.models.PipelineStatus
:ivar graph_detail:
:vartype graph_detail: ~flow.models.PipelineRunGraphDetail
:ivar real_time_endpoint_info:
:vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:ivar linked_pipelines_info:
:vartype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
:ivar nodes_in_draft:
:vartype nodes_in_draft: list[str]
:ivar studio_migration_info:
:vartype studio_migration_info: ~flow.models.StudioMigrationInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_run_setting_parameters:
:vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar continue_run_on_failed_optional_input:
:vartype continue_run_on_failed_optional_input: bool
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar pipeline_timeout:
:vartype pipeline_timeout: int
:ivar identity_config:
:vartype identity_config: ~flow.models.IdentitySetting
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'source_pipeline_run_id': {'key': 'sourcePipelineRunId', 'type': 'str'},
'latest_pipeline_run_id': {'key': 'latestPipelineRunId', 'type': 'str'},
'latest_run_experiment_name': {'key': 'latestRunExperimentName', 'type': 'str'},
'latest_run_experiment_id': {'key': 'latestRunExperimentId', 'type': 'str'},
'is_latest_run_experiment_archived': {'key': 'isLatestRunExperimentArchived', 'type': 'bool'},
'status': {'key': 'status', 'type': 'PipelineStatus'},
'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
'linked_pipelines_info': {'key': 'linkedPipelinesInfo', 'type': '[LinkedPipelineInfo]'},
'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
'studio_migration_info': {'key': 'studioMigrationInfo', 'type': 'StudioMigrationInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'pipeline_timeout': {'key': 'pipelineTimeout', 'type': 'int'},
'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword source_pipeline_run_id:
:paramtype source_pipeline_run_id: str
:keyword latest_pipeline_run_id:
:paramtype latest_pipeline_run_id: str
:keyword latest_run_experiment_name:
:paramtype latest_run_experiment_name: str
:keyword latest_run_experiment_id:
:paramtype latest_run_experiment_id: str
:keyword is_latest_run_experiment_archived:
:paramtype is_latest_run_experiment_archived: bool
:keyword status:
:paramtype status: ~flow.models.PipelineStatus
:keyword graph_detail:
:paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
:keyword real_time_endpoint_info:
:paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:keyword linked_pipelines_info:
:paramtype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
:keyword nodes_in_draft:
:paramtype nodes_in_draft: list[str]
:keyword studio_migration_info:
:paramtype studio_migration_info: ~flow.models.StudioMigrationInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_run_setting_parameters:
:paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword continue_run_on_failed_optional_input:
:paramtype continue_run_on_failed_optional_input: bool
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword pipeline_timeout:
:paramtype pipeline_timeout: int
:keyword identity_config:
:paramtype identity_config: ~flow.models.IdentitySetting
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineDraft, self).__init__(**kwargs)
self.graph_draft_id = kwargs.get('graph_draft_id', None)
self.source_pipeline_run_id = kwargs.get('source_pipeline_run_id', None)
self.latest_pipeline_run_id = kwargs.get('latest_pipeline_run_id', None)
self.latest_run_experiment_name = kwargs.get('latest_run_experiment_name', None)
self.latest_run_experiment_id = kwargs.get('latest_run_experiment_id', None)
self.is_latest_run_experiment_archived = kwargs.get('is_latest_run_experiment_archived', None)
self.status = kwargs.get('status', None)
self.graph_detail = kwargs.get('graph_detail', None)
self.real_time_endpoint_info = kwargs.get('real_time_endpoint_info', None)
self.linked_pipelines_info = kwargs.get('linked_pipelines_info', None)
self.nodes_in_draft = kwargs.get('nodes_in_draft', None)
self.studio_migration_info = kwargs.get('studio_migration_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_run_setting_parameters = kwargs.get('pipeline_run_setting_parameters', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.continue_run_on_failed_optional_input = kwargs.get('continue_run_on_failed_optional_input', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.pipeline_timeout = kwargs.get('pipeline_timeout', None)
self.identity_config = kwargs.get('identity_config', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.name = kwargs.get('name', None)
self.last_edited_by = kwargs.get('last_edited_by', None)
self.created_by = kwargs.get('created_by', None)
self.description = kwargs.get('description', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
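
# Illustrative sketch (not generated): constructing a PipelineDraft locally.
# Every keyword argument is optional and defaults to None; the values below are
# hypothetical placeholders.
#
#     draft = PipelineDraft(
#         name="my-draft",
#         pipeline_type="TrainingPipeline",   # enums accept plain strings
#         pipeline_parameters={"learning_rate": "0.01"},
#         tags={"owner": "team-a"},
#     )
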
class PipelineDraftStepDetails(msrest.serialization.Model):
"""PipelineDraftStepDetails.
:ivar run_id:
:vartype run_id: str
:ivar target:
:vartype target: str
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_reused:
:vartype is_reused: bool
:ivar reused_run_id:
:vartype reused_run_id: str
:ivar reused_pipeline_run_id:
:vartype reused_pipeline_run_id: str
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar output_log:
:vartype output_log: str
:ivar run_configuration:
:vartype run_configuration: ~flow.models.RunConfiguration
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar port_outputs: This is a dictionary.
:vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'reused_run_id': {'key': 'reusedRunId', 'type': 'str'},
'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
'logs': {'key': 'logs', 'type': '{str}'},
'output_log': {'key': 'outputLog', 'type': 'str'},
'run_configuration': {'key': 'runConfiguration', 'type': 'RunConfiguration'},
'outputs': {'key': 'outputs', 'type': '{str}'},
'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword target:
:paramtype target: str
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword is_reused:
:paramtype is_reused: bool
:keyword reused_run_id:
:paramtype reused_run_id: str
:keyword reused_pipeline_run_id:
:paramtype reused_pipeline_run_id: str
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword output_log:
:paramtype output_log: str
:keyword run_configuration:
:paramtype run_configuration: ~flow.models.RunConfiguration
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword port_outputs: This is a dictionary.
:paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
"""
super(PipelineDraftStepDetails, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.target = kwargs.get('target', None)
self.status = kwargs.get('status', None)
self.status_detail = kwargs.get('status_detail', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.is_reused = kwargs.get('is_reused', None)
self.reused_run_id = kwargs.get('reused_run_id', None)
self.reused_pipeline_run_id = kwargs.get('reused_pipeline_run_id', None)
self.logs = kwargs.get('logs', None)
self.output_log = kwargs.get('output_log', None)
self.run_configuration = kwargs.get('run_configuration', None)
self.outputs = kwargs.get('outputs', None)
self.port_outputs = kwargs.get('port_outputs', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
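
# Illustrative sketch (not generated): step details carry run metadata; the
# iso-8601 wire fields are plain ``datetime`` objects on the Python side.
# Values below are hypothetical.
#
#     import datetime
#     step = PipelineDraftStepDetails(
#         run_id="run-0001",
#         status="Running",
#         start_time=datetime.datetime(2023, 1, 1, 12, 0, 0),
#         logs={"stdout": "https://example.invalid/logs/stdout.txt"},
#     )
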
class PipelineDraftSummary(msrest.serialization.Model):
"""PipelineDraftSummary.
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineDraftSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.last_edited_by = kwargs.get('last_edited_by', None)
self.created_by = kwargs.get('created_by', None)
self.description = kwargs.get('description', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
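
# Illustrative sketch (not generated): msrest models can be built from a wire
# payload whose keys follow ``_attribute_map`` (camelCase). The payload below
# is hypothetical.
#
#     summary = PipelineDraftSummary.deserialize({
#         "name": "my-draft",
#         "createdBy": "alice",
#         "entityStatus": "Active",
#         "createdDate": "2023-01-01T12:00:00Z",   # parsed into a datetime
#     })
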
class PipelineEndpoint(msrest.serialization.Model):
"""PipelineEndpoint.
:ivar default_version:
:vartype default_version: str
:ivar default_pipeline_id:
:vartype default_pipeline_id: str
:ivar default_graph_id:
:vartype default_graph_id: str
:ivar rest_endpoint:
:vartype rest_endpoint: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar published_by:
:vartype published_by: str
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar default_pipeline_name:
:vartype default_pipeline_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar updated_by:
:vartype updated_by: str
:ivar swagger_url:
:vartype swagger_url: str
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'default_pipeline_id': {'key': 'defaultPipelineId', 'type': 'str'},
'default_graph_id': {'key': 'defaultGraphId', 'type': 'str'},
'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'default_pipeline_name': {'key': 'defaultPipelineName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword default_version:
:paramtype default_version: str
:keyword default_pipeline_id:
:paramtype default_pipeline_id: str
:keyword default_graph_id:
:paramtype default_graph_id: str
:keyword rest_endpoint:
:paramtype rest_endpoint: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword published_by:
:paramtype published_by: str
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword default_pipeline_name:
:paramtype default_pipeline_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword updated_by:
:paramtype updated_by: str
:keyword swagger_url:
:paramtype swagger_url: str
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineEndpoint, self).__init__(**kwargs)
self.default_version = kwargs.get('default_version', None)
self.default_pipeline_id = kwargs.get('default_pipeline_id', None)
self.default_graph_id = kwargs.get('default_graph_id', None)
self.rest_endpoint = kwargs.get('rest_endpoint', None)
self.published_date = kwargs.get('published_date', None)
self.published_by = kwargs.get('published_by', None)
self.parameters = kwargs.get('parameters', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.default_pipeline_name = kwargs.get('default_pipeline_name', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.updated_by = kwargs.get('updated_by', None)
self.swagger_url = kwargs.get('swagger_url', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.tags = kwargs.get('tags', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
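
# Illustrative sketch (not generated): ``serialize()`` (inherited from msrest)
# emits a dict keyed by the REST names in ``_attribute_map``, so
# ``default_version`` becomes ``defaultVersion``. Values are hypothetical.
#
#     endpoint = PipelineEndpoint(
#         name="scoring-endpoint",
#         default_version="1",
#         rest_endpoint="https://example.invalid/pipelines/score",
#     )
#     payload = endpoint.serialize()   # {"name": ..., "defaultVersion": "1", ...}
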
class PipelineEndpointSummary(msrest.serialization.Model):
"""PipelineEndpointSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar updated_by:
:vartype updated_by: str
:ivar swagger_url:
:vartype swagger_url: str
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword updated_by:
:paramtype updated_by: str
:keyword swagger_url:
:paramtype swagger_url: str
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineEndpointSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.updated_by = kwargs.get('updated_by', None)
self.swagger_url = kwargs.get('swagger_url', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.tags = kwargs.get('tags', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
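
# Illustrative sketch (not generated): ``as_dict()`` (inherited from msrest)
# returns a dict including read-only fields that ``serialize()`` would skip,
# which is handy for local inspection. Values are hypothetical.
#
#     summary = PipelineEndpointSummary(name="scoring-endpoint", tags={"env": "dev"})
#     print(summary.as_dict())
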
class PipelineGraph(msrest.serialization.Model):
"""PipelineGraph.
:ivar graph_module_dtos:
:vartype graph_module_dtos: list[~flow.models.ModuleDto]
:ivar graph_data_sources:
:vartype graph_data_sources: list[~flow.models.DataInfo]
:ivar graphs: This is a dictionary.
:vartype graphs: dict[str, ~flow.models.PipelineGraph]
:ivar graph_drafts: This is a dictionary.
:vartype graph_drafts: dict[str, ~flow.models.PipelineGraph]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar referenced_node_id:
:vartype referenced_node_id: str
:ivar pipeline_run_setting_parameters:
:vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar real_time_endpoint_info:
:vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:ivar node_telemetry_meta_infos:
:vartype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.GraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.GraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.GraphEdge]
:ivar entity_interface:
:vartype entity_interface: ~flow.models.EntityInterface
:ivar graph_layout:
:vartype graph_layout: ~flow.models.GraphLayout
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar extended_properties: This is a dictionary.
:vartype extended_properties: dict[str, str]
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'graph_module_dtos': {'key': 'graphModuleDtos', 'type': '[ModuleDto]'},
'graph_data_sources': {'key': 'graphDataSources', 'type': '[DataInfo]'},
'graphs': {'key': 'graphs', 'type': '{PipelineGraph}'},
'graph_drafts': {'key': 'graphDrafts', 'type': '{PipelineGraph}'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'referenced_node_id': {'key': 'referencedNodeId', 'type': 'str'},
'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
'node_telemetry_meta_infos': {'key': 'nodeTelemetryMetaInfos', 'type': '[NodeTelemetryMetaInfo]'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
'edges': {'key': 'edges', 'type': '[GraphEdge]'},
'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword graph_module_dtos:
:paramtype graph_module_dtos: list[~flow.models.ModuleDto]
:keyword graph_data_sources:
:paramtype graph_data_sources: list[~flow.models.DataInfo]
:keyword graphs: This is a dictionary.
:paramtype graphs: dict[str, ~flow.models.PipelineGraph]
:keyword graph_drafts: This is a dictionary.
:paramtype graph_drafts: dict[str, ~flow.models.PipelineGraph]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword referenced_node_id:
:paramtype referenced_node_id: str
:keyword pipeline_run_setting_parameters:
:paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword real_time_endpoint_info:
:paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:keyword node_telemetry_meta_infos:
:paramtype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.GraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.GraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.GraphEdge]
:keyword entity_interface:
:paramtype entity_interface: ~flow.models.EntityInterface
:keyword graph_layout:
:paramtype graph_layout: ~flow.models.GraphLayout
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword extended_properties: This is a dictionary.
:paramtype extended_properties: dict[str, str]
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineGraph, self).__init__(**kwargs)
self.graph_module_dtos = kwargs.get('graph_module_dtos', None)
self.graph_data_sources = kwargs.get('graph_data_sources', None)
self.graphs = kwargs.get('graphs', None)
self.graph_drafts = kwargs.get('graph_drafts', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.referenced_node_id = kwargs.get('referenced_node_id', None)
self.pipeline_run_setting_parameters = kwargs.get('pipeline_run_setting_parameters', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.real_time_endpoint_info = kwargs.get('real_time_endpoint_info', None)
self.node_telemetry_meta_infos = kwargs.get('node_telemetry_meta_infos', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.module_nodes = kwargs.get('module_nodes', None)
self.dataset_nodes = kwargs.get('dataset_nodes', None)
self.sub_graph_nodes = kwargs.get('sub_graph_nodes', None)
self.control_reference_nodes = kwargs.get('control_reference_nodes', None)
self.control_nodes = kwargs.get('control_nodes', None)
self.edges = kwargs.get('edges', None)
self.entity_interface = kwargs.get('entity_interface', None)
self.graph_layout = kwargs.get('graph_layout', None)
self.created_by = kwargs.get('created_by', None)
self.last_updated_by = kwargs.get('last_updated_by', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.extended_properties = kwargs.get('extended_properties', None)
self.parent_sub_graph_module_ids = kwargs.get('parent_sub_graph_module_ids', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
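
# Illustrative sketch (not generated): PipelineGraph nests other models, and
# ``graphs``/``graph_drafts`` map names to further PipelineGraph instances, so
# sub-graphs compose recursively. Names below are hypothetical.
#
#     inner = PipelineGraph(module_nodes=[], edges=[])
#     outer = PipelineGraph(
#         module_nodes=[],                 # list[GraphModuleNode]
#         edges=[],                        # list[GraphEdge]
#         graphs={"subgraph-a": inner},    # dict[str, PipelineGraph]
#     )
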
class PipelineInput(msrest.serialization.Model):
"""PipelineInput.
:ivar data:
:vartype data: ~flow.models.InputData
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'InputData'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.InputData
"""
super(PipelineInput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
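
# Illustrative sketch (not generated): PipelineInput is a thin wrapper around a
# single ``data`` field, assuming an InputData instance built elsewhere.
#
#     pipeline_input = PipelineInput(data=some_input_data)  # some_input_data: InputData
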
class PipelineJob(msrest.serialization.Model):
"""PipelineJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
default value is None.
:vartype pipeline_job_type: str
:ivar pipeline:
:vartype pipeline: ~flow.models.Pipeline
:ivar compute_id:
:vartype compute_id: str
:ivar run_id:
:vartype run_id: str
:ivar settings: Anything.
:vartype settings: any
:ivar component_jobs: This is a dictionary.
:vartype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.JobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.JobOutput]
:ivar bindings:
:vartype bindings: list[~flow.models.Binding]
:ivar jobs: This is a dictionary.
:vartype jobs: dict[str, any]
:ivar input_bindings: This is a dictionary.
:vartype input_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_bindings: This is a dictionary.
:vartype output_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar source_job_id:
:vartype source_job_id: str
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'pipeline_job_type': {'key': 'pipelineJobType', 'type': 'str'},
'pipeline': {'key': 'pipeline', 'type': 'Pipeline'},
'compute_id': {'key': 'computeId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'settings': {'key': 'settings', 'type': 'object'},
'component_jobs': {'key': 'componentJobs', 'type': '{MfeInternalV20211001ComponentJob}'},
'inputs': {'key': 'inputs', 'type': '{JobInput}'},
'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
'bindings': {'key': 'bindings', 'type': '[Binding]'},
'jobs': {'key': 'jobs', 'type': '{object}'},
'input_bindings': {'key': 'inputBindings', 'type': '{InputDataBinding}'},
'output_bindings': {'key': 'outputBindings', 'type': '{OutputDataBinding}'},
'source_job_id': {'key': 'sourceJobId', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
default value is None.
:paramtype pipeline_job_type: str
:keyword pipeline:
:paramtype pipeline: ~flow.models.Pipeline
:keyword compute_id:
:paramtype compute_id: str
:keyword run_id:
:paramtype run_id: str
:keyword settings: Anything.
:paramtype settings: any
:keyword component_jobs: This is a dictionary.
:paramtype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.JobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.JobOutput]
:keyword bindings:
:paramtype bindings: list[~flow.models.Binding]
:keyword jobs: This is a dictionary.
:paramtype jobs: dict[str, any]
:keyword input_bindings: This is a dictionary.
:paramtype input_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_bindings: This is a dictionary.
:paramtype output_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword source_job_id:
:paramtype source_job_id: str
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(PipelineJob, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.pipeline_job_type = kwargs.get('pipeline_job_type', None)
self.pipeline = kwargs.get('pipeline', None)
self.compute_id = kwargs.get('compute_id', None)
self.run_id = kwargs.get('run_id', None)
self.settings = kwargs.get('settings', None)
self.component_jobs = kwargs.get('component_jobs', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.bindings = kwargs.get('bindings', None)
self.jobs = kwargs.get('jobs', None)
self.input_bindings = kwargs.get('input_bindings', None)
self.output_bindings = kwargs.get('output_bindings', None)
self.source_job_id = kwargs.get('source_job_id', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.parent_job_name = kwargs.get('parent_job_name', None)
self.display_name = kwargs.get('display_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.status = kwargs.get('status', None)
self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
self.identity = kwargs.get('identity', None)
self.compute = kwargs.get('compute', None)
self.priority = kwargs.get('priority', None)
self.output = kwargs.get('output', None)
self.is_archived = kwargs.get('is_archived', None)
self.schedule = kwargs.get('schedule', None)
self.component_id = kwargs.get('component_id', None)
self.notification_setting = kwargs.get('notification_setting', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
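
# Illustrative sketch (not generated): ``settings`` and the values of ``jobs``
# are typed ``object`` in ``_attribute_map``, so arbitrary JSON-compatible
# payloads pass through untyped. Values are hypothetical.
#
#     job = PipelineJob(
#         job_type="Pipeline",
#         display_name="nightly-training",
#         settings={"default_compute": "cpu-cluster"},   # free-form payload
#         jobs={"train": {"type": "command"}},           # dict[str, any]
#     )
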
class PipelineJobRuntimeBasicSettings(msrest.serialization.Model):
"""PipelineJobRuntimeBasicSettings.
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
"""
_attribute_map = {
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
"""
super(PipelineJobRuntimeBasicSettings, self).__init__(**kwargs)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.tags = kwargs.get('tags', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.trigger_time_string = kwargs.get('trigger_time_string', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
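
# Illustrative sketch (not generated): runtime settings collect per-submission
# overrides; ``pipeline_parameters`` is a plain str-to-str map. Values are
# hypothetical.
#
#     settings = PipelineJobRuntimeBasicSettings(
#         experiment_name="nightly",
#         pipeline_job_name="train-2023-01-01",
#         pipeline_parameters={"epochs": "10"},
#     )
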
class PipelineJobScheduleDto(msrest.serialization.Model):
"""PipelineJobScheduleDto.
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar name:
:vartype name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword name:
:paramtype name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(PipelineJobScheduleDto, self).__init__(**kwargs)
self.system_data = kwargs.get('system_data', None)
self.name = kwargs.get('name', None)
self.pipeline_job_name = kwargs.get('pipeline_job_name', None)
self.pipeline_job_runtime_settings = kwargs.get('pipeline_job_runtime_settings', None)
self.display_name = kwargs.get('display_name', None)
self.trigger_type = kwargs.get('trigger_type', None)
self.recurrence = kwargs.get('recurrence', None)
self.cron = kwargs.get('cron', None)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
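
# Illustrative sketch (not generated): a schedule pairs one trigger kind with
# its matching field; with ``trigger_type="Cron"`` the ``cron`` model is
# expected (built elsewhere), with "Recurrence" the ``recurrence`` model.
# Values are hypothetical.
#
#     schedule = PipelineJobScheduleDto(
#         name="nightly-schedule",
#         trigger_type="Cron",
#         cron=some_cron,          # some_cron: ~flow.models.Cron
#         status="Enabled",
#     )
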
class PipelineOutput(msrest.serialization.Model):
"""PipelineOutput.
:ivar data:
:vartype data: ~flow.models.MfeInternalOutputData
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.MfeInternalOutputData
"""
super(PipelineOutput, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
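
# Illustrative sketch (not generated): like PipelineInput, this wraps a single
# ``data`` field, here an MfeInternalOutputData instance built elsewhere.
#
#     pipeline_output = PipelineOutput(data=some_output_data)
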
class PipelineRun(msrest.serialization.Model):
"""PipelineRun.
:ivar pipeline_id:
:vartype pipeline_id: str
:ivar run_source:
:vartype run_source: str
:ivar run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
:vartype run_type: str or ~flow.models.RunType
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar total_steps:
:vartype total_steps: int
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar user_alias:
:vartype user_alias: str
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar continue_run_on_failed_optional_input:
:vartype continue_run_on_failed_optional_input: bool
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar pipeline_timeout_seconds:
:vartype pipeline_timeout_seconds: int
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar identity_config:
:vartype identity_config: ~flow.models.IdentitySetting
:ivar description:
:vartype description: str
:ivar display_name:
:vartype display_name: str
:ivar run_number:
:vartype run_number: int
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar graph_id:
:vartype graph_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
:ivar submitted_by:
:vartype submitted_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
}
_attribute_map = {
'pipeline_id': {'key': 'pipelineId', 'type': 'str'},
'run_source': {'key': 'runSource', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'total_steps': {'key': 'totalSteps', 'type': 'int'},
'logs': {'key': 'logs', 'type': '{str}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'pipeline_timeout_seconds': {'key': 'pipelineTimeoutSeconds', 'type': 'int'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
'submitted_by': {'key': 'submittedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}

    def __init__(
self,
**kwargs
):
"""
:keyword pipeline_id:
:paramtype pipeline_id: str
:keyword run_source:
:paramtype run_source: str
:keyword run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
:paramtype run_type: str or ~flow.models.RunType
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword total_steps:
:paramtype total_steps: int
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword user_alias:
:paramtype user_alias: str
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword continue_run_on_failed_optional_input:
:paramtype continue_run_on_failed_optional_input: bool
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword pipeline_timeout_seconds:
:paramtype pipeline_timeout_seconds: int
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword identity_config:
:paramtype identity_config: ~flow.models.IdentitySetting
:keyword description:
:paramtype description: str
:keyword display_name:
:paramtype display_name: str
:keyword run_number:
:paramtype run_number: int
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword graph_id:
:paramtype graph_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
:keyword submitted_by:
:paramtype submitted_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineRun, self).__init__(**kwargs)
self.pipeline_id = kwargs.get('pipeline_id', None)
self.run_source = kwargs.get('run_source', None)
self.run_type = kwargs.get('run_type', None)
self.parameters = kwargs.get('parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.total_steps = kwargs.get('total_steps', None)
self.logs = kwargs.get('logs', None)
self.user_alias = kwargs.get('user_alias', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.continue_run_on_failed_optional_input = kwargs.get('continue_run_on_failed_optional_input', None)
self.default_compute = kwargs.get('default_compute', None)
self.default_datastore = kwargs.get('default_datastore', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.pipeline_timeout_seconds = kwargs.get('pipeline_timeout_seconds', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.identity_config = kwargs.get('identity_config', None)
self.description = kwargs.get('description', None)
self.display_name = kwargs.get('display_name', None)
self.run_number = kwargs.get('run_number', None)
self.status_code = kwargs.get('status_code', None)
self.run_status = kwargs.get('run_status', None)
self.status_detail = kwargs.get('status_detail', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.graph_id = kwargs.get('graph_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
self.submitted_by = kwargs.get('submitted_by', None)
self.tags = kwargs.get('tags', None)
self.step_tags = kwargs.get('step_tags', None)
self.properties = kwargs.get('properties', None)
self.aether_start_time = kwargs.get('aether_start_time', None)
self.aether_end_time = kwargs.get('aether_end_time', None)
self.run_history_start_time = kwargs.get('run_history_start_time', None)
self.run_history_end_time = kwargs.get('run_history_end_time', None)
self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
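
# Illustrative usage sketch (not part of the generated client). A PipelineRun can
# be built locally from keyword arguments and turned into its REST wire shape via
# msrest's Model.serialize(), which applies the camelCase keys in _attribute_map.
# All field values below are hypothetical.
#
#   run = PipelineRun(
#       pipeline_id="pipeline-123",
#       run_type="SDK",
#       parameters={"learning_rate": "0.01"},
#       tags={"team": "flows"},
#   )
#   body = run.serialize()  # dict keyed by the wire names, e.g. body.get("pipelineId")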


class PipelineRunGraphDetail(msrest.serialization.Model):
    """PipelineRunGraphDetail.

    :ivar graph:
:vartype graph: ~flow.models.PipelineGraph
:ivar graph_nodes_status: This is a dictionary.
:vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
"""
_attribute_map = {
'graph': {'key': 'graph', 'type': 'PipelineGraph'},
'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph:
:paramtype graph: ~flow.models.PipelineGraph
:keyword graph_nodes_status: This is a dictionary.
:paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
"""
super(PipelineRunGraphDetail, self).__init__(**kwargs)
self.graph = kwargs.get('graph', None)
self.graph_nodes_status = kwargs.get('graph_nodes_status', None)


class PipelineRunGraphStatus(msrest.serialization.Model):
    """PipelineRunGraphStatus.

    :ivar status:
:vartype status: ~flow.models.PipelineStatus
:ivar graph_nodes_status: This is a dictionary.
:vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
:ivar experiment_id:
:vartype experiment_id: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'PipelineStatus'},
'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status:
:paramtype status: ~flow.models.PipelineStatus
:keyword graph_nodes_status: This is a dictionary.
:paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
:keyword experiment_id:
:paramtype experiment_id: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
"""
super(PipelineRunGraphStatus, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.graph_nodes_status = kwargs.get('graph_nodes_status', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)


class PipelineRunProfile(msrest.serialization.Model):
    """PipelineRunProfile.

    :ivar run_id:
:vartype run_id: str
:ivar node_id:
:vartype node_id: str
:ivar run_url:
:vartype run_url: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar description:
:vartype description: str
:ivar status:
:vartype status: ~flow.models.PipelineRunStatus
:ivar create_time:
:vartype create_time: long
:ivar start_time:
:vartype start_time: long
:ivar end_time:
:vartype end_time: long
:ivar profiling_time:
:vartype profiling_time: long
:ivar step_runs_profile:
:vartype step_runs_profile: list[~flow.models.StepRunProfile]
:ivar sub_pipeline_run_profile:
:vartype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'run_url': {'key': 'runUrl', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'status': {'key': 'status', 'type': 'PipelineRunStatus'},
'create_time': {'key': 'createTime', 'type': 'long'},
'start_time': {'key': 'startTime', 'type': 'long'},
'end_time': {'key': 'endTime', 'type': 'long'},
'profiling_time': {'key': 'profilingTime', 'type': 'long'},
'step_runs_profile': {'key': 'stepRunsProfile', 'type': '[StepRunProfile]'},
'sub_pipeline_run_profile': {'key': 'subPipelineRunProfile', 'type': '[PipelineRunProfile]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword run_url:
:paramtype run_url: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword description:
:paramtype description: str
:keyword status:
:paramtype status: ~flow.models.PipelineRunStatus
:keyword create_time:
:paramtype create_time: long
:keyword start_time:
:paramtype start_time: long
:keyword end_time:
:paramtype end_time: long
:keyword profiling_time:
:paramtype profiling_time: long
:keyword step_runs_profile:
:paramtype step_runs_profile: list[~flow.models.StepRunProfile]
:keyword sub_pipeline_run_profile:
:paramtype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
"""
super(PipelineRunProfile, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.node_id = kwargs.get('node_id', None)
self.run_url = kwargs.get('run_url', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.description = kwargs.get('description', None)
self.status = kwargs.get('status', None)
self.create_time = kwargs.get('create_time', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.profiling_time = kwargs.get('profiling_time', None)
self.step_runs_profile = kwargs.get('step_runs_profile', None)
self.sub_pipeline_run_profile = kwargs.get('sub_pipeline_run_profile', None)
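
# Illustrative sketch: PipelineRunProfile is recursive, since sub_pipeline_run_profile
# holds nested PipelineRunProfile instances, so an entire pipeline/sub-pipeline tree
# can be modeled. The 'long' timestamps are int64 values whose unit is not specified
# here. All values below are hypothetical.
#
#   child = PipelineRunProfile(run_id="run-child", node_id="node-2")
#   root = PipelineRunProfile(
#       run_id="run-root",
#       step_runs_profile=[],
#       sub_pipeline_run_profile=[child],
#   )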


class PipelineRunStatus(msrest.serialization.Model):
    """PipelineRunStatus.

    :ivar status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype status_code: str or ~flow.models.PipelineRunStatusCode
:ivar status_detail:
:vartype status_detail: str
:ivar creation_time:
:vartype creation_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
"""
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:paramtype status_code: str or ~flow.models.PipelineRunStatusCode
:keyword status_detail:
:paramtype status_detail: str
:keyword creation_time:
:paramtype creation_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(PipelineRunStatus, self).__init__(**kwargs)
self.status_code = kwargs.get('status_code', None)
self.status_detail = kwargs.get('status_detail', None)
self.creation_time = kwargs.get('creation_time', None)
self.end_time = kwargs.get('end_time', None)


class PipelineRunStepDetails(msrest.serialization.Model):
    """PipelineRunStepDetails.

    :ivar run_id:
:vartype run_id: str
:ivar target:
:vartype target: str
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_reused:
:vartype is_reused: bool
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar snapshot_info:
:vartype snapshot_info: ~flow.models.SnapshotInfo
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
"""
_validation = {
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'logs': {'key': 'logs', 'type': '{str}'},
'outputs': {'key': 'outputs', 'type': '{str}'},
'snapshot_info': {'key': 'snapshotInfo', 'type': 'SnapshotInfo'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword target:
:paramtype target: str
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword is_reused:
:paramtype is_reused: bool
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword snapshot_info:
:paramtype snapshot_info: ~flow.models.SnapshotInfo
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
"""
super(PipelineRunStepDetails, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.target = kwargs.get('target', None)
self.status = kwargs.get('status', None)
self.status_detail = kwargs.get('status_detail', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.is_reused = kwargs.get('is_reused', None)
self.logs = kwargs.get('logs', None)
self.outputs = kwargs.get('outputs', None)
self.snapshot_info = kwargs.get('snapshot_info', None)
self.input_datasets = kwargs.get('input_datasets', None)
self.output_datasets = kwargs.get('output_datasets', None)
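
# Illustrative sketch: the _validation table above marks input_datasets and
# output_datasets as 'unique'; msrest's Model.validate() surfaces violations of
# such client-side constraints as a list of errors (empty when the instance is
# valid). The log path below is hypothetical.
#
#   details = PipelineRunStepDetails(
#       run_id="step-run-1",
#       logs={"std_log": "azureml-logs/70_driver_log.txt"},
#   )
#   assert details.validate() == []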


class PipelineRunSummary(msrest.serialization.Model):
    """PipelineRunSummary.

    :ivar description:
:vartype description: str
:ivar display_name:
:vartype display_name: str
:ivar run_number:
:vartype run_number: int
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar graph_id:
:vartype graph_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
:ivar submitted_by:
:vartype submitted_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
}
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
'submitted_by': {'key': 'submittedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword description:
:paramtype description: str
:keyword display_name:
:paramtype display_name: str
:keyword run_number:
:paramtype run_number: int
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword graph_id:
:paramtype graph_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
:keyword submitted_by:
:paramtype submitted_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineRunSummary, self).__init__(**kwargs)
self.description = kwargs.get('description', None)
self.display_name = kwargs.get('display_name', None)
self.run_number = kwargs.get('run_number', None)
self.status_code = kwargs.get('status_code', None)
self.run_status = kwargs.get('run_status', None)
self.status_detail = kwargs.get('status_detail', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.graph_id = kwargs.get('graph_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.is_experiment_archived = kwargs.get('is_experiment_archived', None)
self.submitted_by = kwargs.get('submitted_by', None)
self.tags = kwargs.get('tags', None)
self.step_tags = kwargs.get('step_tags', None)
self.properties = kwargs.get('properties', None)
self.aether_start_time = kwargs.get('aether_start_time', None)
self.aether_end_time = kwargs.get('aether_end_time', None)
self.run_history_start_time = kwargs.get('run_history_start_time', None)
self.run_history_end_time = kwargs.get('run_history_end_time', None)
self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)


class PipelineStatus(msrest.serialization.Model):
    """PipelineStatus.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_terminal_state:
:vartype is_terminal_state: bool
"""
_validation = {
'is_terminal_state': {'readonly': True},
}
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_terminal_state': {'key': 'isTerminalState', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(PipelineStatus, self).__init__(**kwargs)
self.status_code = kwargs.get('status_code', None)
self.run_status = kwargs.get('run_status', None)
self.status_detail = kwargs.get('status_detail', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.is_terminal_state = None
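
# Illustrative sketch: is_terminal_state is declared 'readonly' above, so it is
# populated only from service responses and msrest's Model.serialize() skips it
# when building a request body (read-only fields are dropped by default).
#
#   status = PipelineStatus(status_code="Running", run_status="Running")
#   status.serialize()  # {'statusCode': 'Running', 'runStatus': 'Running'}
#   # status.is_terminal_state stays None locally; the service fills it in responses.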


class PipelineStepRun(msrest.serialization.Model):
    """PipelineStepRun.

    :ivar step_name:
:vartype step_name: str
:ivar run_number:
:vartype run_number: int
:ivar run_id:
:vartype run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar compute_target:
:vartype compute_target: str
:ivar compute_type:
:vartype compute_type: str
:ivar run_type:
:vartype run_type: str
:ivar step_type:
:vartype step_type: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar is_reused:
:vartype is_reused: bool
:ivar display_name:
:vartype display_name: str
"""
_attribute_map = {
'step_name': {'key': 'stepName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'run_id': {'key': 'runId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'display_name': {'key': 'displayName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword step_name:
:paramtype step_name: str
:keyword run_number:
:paramtype run_number: int
:keyword run_id:
:paramtype run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword compute_target:
:paramtype compute_target: str
:keyword compute_type:
:paramtype compute_type: str
:keyword run_type:
:paramtype run_type: str
:keyword step_type:
:paramtype step_type: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword is_reused:
:paramtype is_reused: bool
:keyword display_name:
:paramtype display_name: str
"""
super(PipelineStepRun, self).__init__(**kwargs)
self.step_name = kwargs.get('step_name', None)
self.run_number = kwargs.get('run_number', None)
self.run_id = kwargs.get('run_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.run_status = kwargs.get('run_status', None)
self.compute_target = kwargs.get('compute_target', None)
self.compute_type = kwargs.get('compute_type', None)
self.run_type = kwargs.get('run_type', None)
self.step_type = kwargs.get('step_type', None)
self.tags = kwargs.get('tags', None)
self.is_reused = kwargs.get('is_reused', None)
self.display_name = kwargs.get('display_name', None)


class PipelineStepRunOutputs(msrest.serialization.Model):
    """PipelineStepRunOutputs.

    :ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar port_outputs: This is a dictionary.
:vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
"""
_attribute_map = {
'outputs': {'key': 'outputs', 'type': '{str}'},
'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword port_outputs: This is a dictionary.
:paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
"""
super(PipelineStepRunOutputs, self).__init__(**kwargs)
self.outputs = kwargs.get('outputs', None)
self.port_outputs = kwargs.get('port_outputs', None)
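
# Illustrative sketch: outputs is a plain str->str mapping, while port_outputs
# maps port names to PortOutputInfo models (defined further down in this module).
# All names and paths below are hypothetical.
#
#   step_outputs = PipelineStepRunOutputs(
#       outputs={"metrics": "outputs/metrics.json"},
#       port_outputs={"output_data": PortOutputInfo(relative_path="outputs/data")},
#   )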


class PipelineSubDraft(msrest.serialization.Model):
    """PipelineSubDraft.

    :ivar parent_graph_draft_id:
:vartype parent_graph_draft_id: str
:ivar parent_node_id:
:vartype parent_node_id: str
:ivar graph_detail:
:vartype graph_detail: ~flow.models.PipelineRunGraphDetail
:ivar module_dto:
:vartype module_dto: ~flow.models.ModuleDto
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'parent_graph_draft_id': {'key': 'parentGraphDraftId', 'type': 'str'},
'parent_node_id': {'key': 'parentNodeId', 'type': 'str'},
'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
'module_dto': {'key': 'moduleDto', 'type': 'ModuleDto'},
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parent_graph_draft_id:
:paramtype parent_graph_draft_id: str
:keyword parent_node_id:
:paramtype parent_node_id: str
:keyword graph_detail:
:paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
:keyword module_dto:
:paramtype module_dto: ~flow.models.ModuleDto
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineSubDraft, self).__init__(**kwargs)
self.parent_graph_draft_id = kwargs.get('parent_graph_draft_id', None)
self.parent_node_id = kwargs.get('parent_node_id', None)
self.graph_detail = kwargs.get('graph_detail', None)
self.module_dto = kwargs.get('module_dto', None)
self.name = kwargs.get('name', None)
self.last_edited_by = kwargs.get('last_edited_by', None)
self.created_by = kwargs.get('created_by', None)
self.description = kwargs.get('description', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
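
# Illustrative sketch: a sub-draft nests the PipelineRunGraphDetail model defined
# above and carries the same entity envelope (id, etag, created_date, ...) as the
# other Pipeline* models in this module. All values below are hypothetical.
#
#   sub_draft = PipelineSubDraft(
#       parent_graph_draft_id="draft-1",
#       parent_node_id="node-3",
#       graph_detail=PipelineRunGraphDetail(),
#       pipeline_type="TrainingPipeline",
#   )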


class PolicyValidationResponse(msrest.serialization.Model):
    """PolicyValidationResponse.

    :ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:vartype action_type: str or ~flow.models.ActionType
"""
_attribute_map = {
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:paramtype action_type: str or ~flow.models.ActionType
"""
super(PolicyValidationResponse, self).__init__(**kwargs)
self.error_response = kwargs.get('error_response', None)
self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
self.action_type = kwargs.get('action_type', None)
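
# Illustrative sketch: action_type together with next_action_interval_in_seconds
# suggests a client-driven follow-up loop. The dispatch below is an assumption
# about intended usage, not documented service behavior.
#
#   import time
#
#   def follow_up(response):  # response: PolicyValidationResponse
#       if response.error_response is not None:
#           raise RuntimeError(str(response.error_response))
#       if response.next_action_interval_in_seconds:
#           time.sleep(response.next_action_interval_in_seconds)
#       # then dispatch on response.action_type, e.g. "GetValidationStatus" polls again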


class PortInfo(msrest.serialization.Model):
    """PortInfo.

    :ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar graph_port_name:
:vartype graph_port_name: str
:ivar is_parameter:
:vartype is_parameter: bool
:ivar web_service_port:
:vartype web_service_port: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
'is_parameter': {'key': 'isParameter', 'type': 'bool'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword graph_port_name:
:paramtype graph_port_name: str
:keyword is_parameter:
:paramtype is_parameter: bool
:keyword web_service_port:
:paramtype web_service_port: str
"""
super(PortInfo, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
self.graph_port_name = kwargs.get('graph_port_name', None)
self.is_parameter = kwargs.get('is_parameter', None)
self.web_service_port = kwargs.get('web_service_port', None)


class PortOutputInfo(msrest.serialization.Model):
    """PortOutputInfo.

    :ivar container_uri:
:vartype container_uri: str
:ivar relative_path:
:vartype relative_path: str
:ivar preview_params:
:vartype preview_params: str
:ivar model_output_path:
:vartype model_output_path: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:vartype data_reference_type: str or ~flow.models.DataReferenceType
:ivar is_file:
:vartype is_file: bool
:ivar supported_actions:
:vartype supported_actions: list[str or ~flow.models.PortAction]
"""
_attribute_map = {
'container_uri': {'key': 'containerUri', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'preview_params': {'key': 'previewParams', 'type': 'str'},
'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
'is_file': {'key': 'isFile', 'type': 'bool'},
'supported_actions': {'key': 'supportedActions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword container_uri:
:paramtype container_uri: str
:keyword relative_path:
:paramtype relative_path: str
:keyword preview_params:
:paramtype preview_params: str
:keyword model_output_path:
:paramtype model_output_path: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype data_reference_type: str or ~flow.models.DataReferenceType
:keyword is_file:
:paramtype is_file: bool
:keyword supported_actions:
:paramtype supported_actions: list[str or ~flow.models.PortAction]
"""
super(PortOutputInfo, self).__init__(**kwargs)
self.container_uri = kwargs.get('container_uri', None)
self.relative_path = kwargs.get('relative_path', None)
self.preview_params = kwargs.get('preview_params', None)
self.model_output_path = kwargs.get('model_output_path', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_reference_type = kwargs.get('data_reference_type', None)
self.is_file = kwargs.get('is_file', None)
self.supported_actions = kwargs.get('supported_actions', None)
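
# Illustrative sketch: Model.deserialize() consumes the wire (camelCase) shape
# shown in _attribute_map, so a raw service payload can be rehydrated into a
# typed model. The payload below is hypothetical.
#
#   payload = {
#       "containerUri": "https://account.blob.core.windows.net/container",
#       "relativePath": "outputs/data.csv",
#       "dataReferenceType": "AzureBlob",
#       "isFile": True,
#   }
#   info = PortOutputInfo.deserialize(payload)
#   assert info.is_file is True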


class PriorityConfig(msrest.serialization.Model):
    """PriorityConfig.

    :ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(PriorityConfig, self).__init__(**kwargs)
self.job_priority = kwargs.get('job_priority', None)
self.is_preemptible = kwargs.get('is_preemptible', None)
self.node_count_set = kwargs.get('node_count_set', None)
self.scale_interval = kwargs.get('scale_interval', None)


class PriorityConfiguration(msrest.serialization.Model):
    """PriorityConfiguration.

    :ivar cloud_priority:
:vartype cloud_priority: int
:ivar string_type_priority:
:vartype string_type_priority: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword string_type_priority:
:paramtype string_type_priority: str
"""
super(PriorityConfiguration, self).__init__(**kwargs)
self.cloud_priority = kwargs.get('cloud_priority', None)
self.string_type_priority = kwargs.get('string_type_priority', None)


class PromoteDataSetRequest(msrest.serialization.Model):
    """PromoteDataSetRequest.

    :ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar module_node_id:
:vartype module_node_id: str
:ivar step_run_id:
:vartype step_run_id: str
:ivar output_port_name:
:vartype output_port_name: str
:ivar model_output_path:
:vartype model_output_path: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar dataset_type:
:vartype dataset_type: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar output_relative_path:
:vartype output_relative_path: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar root_pipeline_run_id:
:vartype root_pipeline_run_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'step_run_id': {'key': 'stepRunId', 'type': 'str'},
'output_port_name': {'key': 'outputPortName', 'type': 'str'},
'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'output_relative_path': {'key': 'outputRelativePath', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword module_node_id:
:paramtype module_node_id: str
:keyword step_run_id:
:paramtype step_run_id: str
:keyword output_port_name:
:paramtype output_port_name: str
:keyword model_output_path:
:paramtype model_output_path: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword dataset_type:
:paramtype dataset_type: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword output_relative_path:
:paramtype output_relative_path: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword root_pipeline_run_id:
:paramtype root_pipeline_run_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(PromoteDataSetRequest, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.module_node_id = kwargs.get('module_node_id', None)
self.step_run_id = kwargs.get('step_run_id', None)
self.output_port_name = kwargs.get('output_port_name', None)
self.model_output_path = kwargs.get('model_output_path', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.dataset_type = kwargs.get('dataset_type', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.output_relative_path = kwargs.get('output_relative_path', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.root_pipeline_run_id = kwargs.get('root_pipeline_run_id', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
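
# Illustrative sketch: a promote request ties a step run's output port to a new
# dataset registration. Every identifier below is hypothetical.
#
#   request = PromoteDataSetRequest(
#       name="cleaned-data",
#       step_run_id="step-run-42",
#       output_port_name="output_data",
#       data_store_name="workspaceblobstore",
#       output_relative_path="azureml/step-run-42/output_data",
#   )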


class ProviderEntity(msrest.serialization.Model):
    """ProviderEntity.

    :ivar provider:
:vartype provider: str
:ivar module:
:vartype module: str
:ivar connection_type:
:vartype connection_type: list[str or ~flow.models.ConnectionType]
:ivar apis:
:vartype apis: list[~flow.models.ApiAndParameters]
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
'connection_type': {'key': 'connection_type', 'type': '[str]'},
'apis': {'key': 'apis', 'type': '[ApiAndParameters]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword provider:
:paramtype provider: str
:keyword module:
:paramtype module: str
:keyword connection_type:
:paramtype connection_type: list[str or ~flow.models.ConnectionType]
:keyword apis:
:paramtype apis: list[~flow.models.ApiAndParameters]
"""
super(ProviderEntity, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.module = kwargs.get('module', None)
self.connection_type = kwargs.get('connection_type', None)
self.apis = kwargs.get('apis', None)
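
# Illustrative sketch: connection_type is documented as a list of ConnectionType
# enum values but serialized as plain strings (type '[str]' above), and its wire
# key is snake_case ('connection_type'), unlike the camelCase keys used elsewhere
# in this module. Values below are hypothetical.
#
#   entity = ProviderEntity(
#       provider="AzureOpenAI",
#       module="promptflow.tools.aoai",
#       connection_type=["AzureOpenAI"],
#   )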


class PublishedPipeline(msrest.serialization.Model):
    """PublishedPipeline.

    :ivar total_run_steps:
:vartype total_run_steps: int
:ivar total_runs:
:vartype total_runs: int
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar rest_endpoint:
:vartype rest_endpoint: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar graph_id:
:vartype graph_id: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar published_by:
:vartype published_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar version:
:vartype version: str
:ivar is_default:
:vartype is_default: bool
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'total_run_steps': {'key': 'totalRunSteps', 'type': 'int'},
'total_runs': {'key': 'totalRuns', 'type': 'int'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword total_run_steps:
:paramtype total_run_steps: int
:keyword total_runs:
:paramtype total_runs: int
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword rest_endpoint:
:paramtype rest_endpoint: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword graph_id:
:paramtype graph_id: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword published_by:
:paramtype published_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword version:
:paramtype version: str
:keyword is_default:
:paramtype is_default: bool
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PublishedPipeline, self).__init__(**kwargs)
self.total_run_steps = kwargs.get('total_run_steps', None)
self.total_runs = kwargs.get('total_runs', None)
self.parameters = kwargs.get('parameters', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.rest_endpoint = kwargs.get('rest_endpoint', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.graph_id = kwargs.get('graph_id', None)
self.published_date = kwargs.get('published_date', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.published_by = kwargs.get('published_by', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.version = kwargs.get('version', None)
self.is_default = kwargs.get('is_default', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)


class PublishedPipelineSummary(msrest.serialization.Model):
    """PublishedPipelineSummary.

    :ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar graph_id:
:vartype graph_id: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar published_by:
:vartype published_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar version:
:vartype version: str
:ivar is_default:
:vartype is_default: bool
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword graph_id:
:paramtype graph_id: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword published_by:
:paramtype published_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword version:
:paramtype version: str
:keyword is_default:
:paramtype is_default: bool
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PublishedPipelineSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.graph_id = kwargs.get('graph_id', None)
self.published_date = kwargs.get('published_date', None)
self.last_run_time = kwargs.get('last_run_time', None)
self.last_run_status = kwargs.get('last_run_status', None)
self.published_by = kwargs.get('published_by', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.version = kwargs.get('version', None)
self.is_default = kwargs.get('is_default', None)
self.entity_status = kwargs.get('entity_status', None)
self.id = kwargs.get('id', None)
self.etag = kwargs.get('etag', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)


class PythonInterfaceMapping(msrest.serialization.Model):
    """PythonInterfaceMapping.

    :ivar name:
:vartype name: str
:ivar name_in_yaml:
:vartype name_in_yaml: str
:ivar argument_name:
:vartype argument_name: str
:ivar command_line_option:
:vartype command_line_option: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'name_in_yaml': {'key': 'nameInYaml', 'type': 'str'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'command_line_option': {'key': 'commandLineOption', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword name_in_yaml:
:paramtype name_in_yaml: str
:keyword argument_name:
:paramtype argument_name: str
:keyword command_line_option:
:paramtype command_line_option: str
"""
super(PythonInterfaceMapping, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.name_in_yaml = kwargs.get('name_in_yaml', None)
self.argument_name = kwargs.get('argument_name', None)
self.command_line_option = kwargs.get('command_line_option', None)


class PythonPyPiOrRCranLibraryDto(msrest.serialization.Model):
    """PythonPyPiOrRCranLibraryDto.

    :ivar package:
:vartype package: str
:ivar repo:
:vartype repo: str
"""
_attribute_map = {
'package': {'key': 'package', 'type': 'str'},
'repo': {'key': 'repo', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword package:
:paramtype package: str
:keyword repo:
:paramtype repo: str
"""
super(PythonPyPiOrRCranLibraryDto, self).__init__(**kwargs)
self.package = kwargs.get('package', None)
self.repo = kwargs.get('repo', None)


class PythonSection(msrest.serialization.Model):
    """PythonSection.

    :ivar interpreter_path:
:vartype interpreter_path: str
:ivar user_managed_dependencies:
:vartype user_managed_dependencies: bool
:ivar conda_dependencies: Anything.
:vartype conda_dependencies: any
:ivar base_conda_environment:
:vartype base_conda_environment: str
"""
_attribute_map = {
'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword interpreter_path:
:paramtype interpreter_path: str
:keyword user_managed_dependencies:
:paramtype user_managed_dependencies: bool
:keyword conda_dependencies: Anything.
:paramtype conda_dependencies: any
:keyword base_conda_environment:
:paramtype base_conda_environment: str
"""
super(PythonSection, self).__init__(**kwargs)
self.interpreter_path = kwargs.get('interpreter_path', None)
self.user_managed_dependencies = kwargs.get('user_managed_dependencies', None)
self.conda_dependencies = kwargs.get('conda_dependencies', None)
self.base_conda_environment = kwargs.get('base_conda_environment', None)
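
# Illustrative sketch: conda_dependencies is typed 'object' (any JSON value), so
# a parsed conda environment mapping can be attached as-is. Hypothetical content:
#
#   section = PythonSection(
#       interpreter_path="python",
#       user_managed_dependencies=False,
#       conda_dependencies={
#           "name": "example-env",
#           "dependencies": ["python=3.9", {"pip": ["promptflow"]}],
#       },
#   )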


class PyTorchConfiguration(msrest.serialization.Model):
    """PyTorchConfiguration.

    :ivar communication_backend:
:vartype communication_backend: str
:ivar process_count:
:vartype process_count: int
"""
_attribute_map = {
'communication_backend': {'key': 'communicationBackend', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword communication_backend:
:paramtype communication_backend: str
:keyword process_count:
:paramtype process_count: int
"""
super(PyTorchConfiguration, self).__init__(**kwargs)
self.communication_backend = kwargs.get('communication_backend', None)
self.process_count = kwargs.get('process_count', None)


class QueueingInfo(msrest.serialization.Model):
    """QueueingInfo.

    :ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar last_refresh_timestamp:
:vartype last_refresh_timestamp: ~datetime.datetime
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword last_refresh_timestamp:
:paramtype last_refresh_timestamp: ~datetime.datetime
"""
super(QueueingInfo, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.last_refresh_timestamp = kwargs.get('last_refresh_timestamp', None)


class RawComponentDto(msrest.serialization.Model):
    """RawComponentDto.

    :ivar component_schema:
:vartype component_schema: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar type: Possible values include: "Unknown", "CommandComponent", "Command".
:vartype type: str or ~flow.models.ComponentType
:ivar component_type_version:
:vartype component_type_version: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar successful_return_code:
:vartype successful_return_code: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.ComponentInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.ComponentOutput]
:ivar command:
:vartype command: str
:ivar environment_name:
:vartype environment_name: str
:ivar environment_version:
:vartype environment_version: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar component_internal_id:
:vartype component_internal_id: str
"""
_attribute_map = {
'component_schema': {'key': 'componentSchema', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'component_type_version': {'key': 'componentTypeVersion', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'successful_return_code': {'key': 'successfulReturnCode', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
'command': {'key': 'command', 'type': 'str'},
'environment_name': {'key': 'environmentName', 'type': 'str'},
'environment_version': {'key': 'environmentVersion', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'component_internal_id': {'key': 'componentInternalId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword component_schema:
:paramtype component_schema: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword type: Possible values include: "Unknown", "CommandComponent", "Command".
:paramtype type: str or ~flow.models.ComponentType
:keyword component_type_version:
:paramtype component_type_version: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword successful_return_code:
:paramtype successful_return_code: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.ComponentInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.ComponentOutput]
:keyword command:
:paramtype command: str
:keyword environment_name:
:paramtype environment_name: str
:keyword environment_version:
:paramtype environment_version: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword component_internal_id:
:paramtype component_internal_id: str
"""
super(RawComponentDto, self).__init__(**kwargs)
self.component_schema = kwargs.get('component_schema', None)
self.is_anonymous = kwargs.get('is_anonymous', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.type = kwargs.get('type', None)
self.component_type_version = kwargs.get('component_type_version', None)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.is_deterministic = kwargs.get('is_deterministic', None)
self.successful_return_code = kwargs.get('successful_return_code', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.command = kwargs.get('command', None)
self.environment_name = kwargs.get('environment_name', None)
self.environment_version = kwargs.get('environment_version', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.created_by = kwargs.get('created_by', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.created_date = kwargs.get('created_date', None)
self.last_modified_date = kwargs.get('last_modified_date', None)
        self.component_internal_id = kwargs.get('component_internal_id', None)


class RayConfiguration(msrest.serialization.Model):
"""RayConfiguration.
:ivar port:
:vartype port: int
:ivar address:
:vartype address: str
:ivar include_dashboard:
:vartype include_dashboard: bool
:ivar dashboard_port:
:vartype dashboard_port: int
:ivar head_node_additional_args:
:vartype head_node_additional_args: str
:ivar worker_node_additional_args:
:vartype worker_node_additional_args: str
"""
_attribute_map = {
'port': {'key': 'port', 'type': 'int'},
'address': {'key': 'address', 'type': 'str'},
'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'},
'dashboard_port': {'key': 'dashboardPort', 'type': 'int'},
'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'},
'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword port:
:paramtype port: int
:keyword address:
:paramtype address: str
:keyword include_dashboard:
:paramtype include_dashboard: bool
:keyword dashboard_port:
:paramtype dashboard_port: int
:keyword head_node_additional_args:
:paramtype head_node_additional_args: str
:keyword worker_node_additional_args:
:paramtype worker_node_additional_args: str
"""
super(RayConfiguration, self).__init__(**kwargs)
self.port = kwargs.get('port', None)
self.address = kwargs.get('address', None)
self.include_dashboard = kwargs.get('include_dashboard', None)
self.dashboard_port = kwargs.get('dashboard_port', None)
self.head_node_additional_args = kwargs.get('head_node_additional_args', None)
self.worker_node_additional_args = kwargs.get('worker_node_additional_args', None)
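
# Illustrative sketch (not part of the generated code): a RayConfiguration using
# Ray's conventional default ports (6379 for the head node, 8265 for the
# dashboard); the port values are assumptions, not defaults enforced by this model.
#
#     ray_config = RayConfiguration(port=6379, include_dashboard=True, dashboard_port=8265)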
class RCranPackage(msrest.serialization.Model):
"""RCranPackage.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar repository:
:vartype repository: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'repository': {'key': 'repository', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword repository:
:paramtype repository: str
"""
super(RCranPackage, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.repository = kwargs.get('repository', None)
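
# Illustrative sketch (not part of the generated code): like every msrest model,
# RCranPackage can be rebuilt from a wire-format dict via the base class's
# ``deserialize`` helper; fields absent from the payload come back as None.
#
#     pkg = RCranPackage.deserialize({"name": "ggplot2", "version": "3.4.0"})
#     assert pkg.repository is None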
class RealTimeEndpoint(msrest.serialization.Model):
"""RealTimeEndpoint.
:ivar created_by:
:vartype created_by: str
:ivar kv_tags: Dictionary of :code:`<string>`.
:vartype kv_tags: dict[str, str]
:ivar state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
"Unschedulable".
:vartype state: str or ~flow.models.WebServiceState
:ivar error:
:vartype error: ~flow.models.ModelManagementErrorResponse
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar image_id:
:vartype image_id: str
:ivar cpu:
:vartype cpu: float
:ivar memory_in_gb:
:vartype memory_in_gb: float
:ivar max_concurrent_requests_per_container:
:vartype max_concurrent_requests_per_container: int
:ivar num_replicas:
:vartype num_replicas: int
:ivar event_hub_enabled:
:vartype event_hub_enabled: bool
:ivar storage_enabled:
:vartype storage_enabled: bool
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar auto_scale_enabled:
:vartype auto_scale_enabled: bool
:ivar min_replicas:
:vartype min_replicas: int
:ivar max_replicas:
:vartype max_replicas: int
:ivar target_utilization:
:vartype target_utilization: int
:ivar refresh_period_in_seconds:
:vartype refresh_period_in_seconds: int
:ivar scoring_uri:
:vartype scoring_uri: str
:ivar deployment_status:
:vartype deployment_status: ~flow.models.AKSReplicaStatus
:ivar scoring_timeout_ms:
:vartype scoring_timeout_ms: int
:ivar auth_enabled:
:vartype auth_enabled: bool
:ivar aad_auth_enabled:
:vartype aad_auth_enabled: bool
:ivar region:
:vartype region: str
:ivar primary_key:
:vartype primary_key: str
:ivar secondary_key:
:vartype secondary_key: str
:ivar swagger_uri:
:vartype swagger_uri: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar warning:
:vartype warning: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar id:
:vartype id: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar updated_time:
:vartype updated_time: ~datetime.datetime
:ivar compute_name:
:vartype compute_name: str
:ivar updated_by:
:vartype updated_by: str
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'kv_tags': {'key': 'kvTags', 'type': '{str}'},
'state': {'key': 'state', 'type': 'str'},
'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'image_id': {'key': 'imageId', 'type': 'str'},
'cpu': {'key': 'cpu', 'type': 'float'},
'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
'num_replicas': {'key': 'numReplicas', 'type': 'int'},
'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'auto_scale_enabled': {'key': 'autoScaleEnabled', 'type': 'bool'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'AKSReplicaStatus'},
'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
'region': {'key': 'region', 'type': 'str'},
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'warning': {'key': 'warning', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_by:
:paramtype created_by: str
:keyword kv_tags: Dictionary of :code:`<string>`.
:paramtype kv_tags: dict[str, str]
:keyword state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
"Unschedulable".
:paramtype state: str or ~flow.models.WebServiceState
:keyword error:
:paramtype error: ~flow.models.ModelManagementErrorResponse
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword image_id:
:paramtype image_id: str
:keyword cpu:
:paramtype cpu: float
:keyword memory_in_gb:
:paramtype memory_in_gb: float
:keyword max_concurrent_requests_per_container:
:paramtype max_concurrent_requests_per_container: int
:keyword num_replicas:
:paramtype num_replicas: int
:keyword event_hub_enabled:
:paramtype event_hub_enabled: bool
:keyword storage_enabled:
:paramtype storage_enabled: bool
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword auto_scale_enabled:
:paramtype auto_scale_enabled: bool
:keyword min_replicas:
:paramtype min_replicas: int
:keyword max_replicas:
:paramtype max_replicas: int
:keyword target_utilization:
:paramtype target_utilization: int
:keyword refresh_period_in_seconds:
:paramtype refresh_period_in_seconds: int
:keyword scoring_uri:
:paramtype scoring_uri: str
:keyword deployment_status:
:paramtype deployment_status: ~flow.models.AKSReplicaStatus
:keyword scoring_timeout_ms:
:paramtype scoring_timeout_ms: int
:keyword auth_enabled:
:paramtype auth_enabled: bool
:keyword aad_auth_enabled:
:paramtype aad_auth_enabled: bool
:keyword region:
:paramtype region: str
:keyword primary_key:
:paramtype primary_key: str
:keyword secondary_key:
:paramtype secondary_key: str
:keyword swagger_uri:
:paramtype swagger_uri: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword warning:
:paramtype warning: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword id:
:paramtype id: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword updated_time:
:paramtype updated_time: ~datetime.datetime
:keyword compute_name:
:paramtype compute_name: str
:keyword updated_by:
:paramtype updated_by: str
"""
super(RealTimeEndpoint, self).__init__(**kwargs)
self.created_by = kwargs.get('created_by', None)
self.kv_tags = kwargs.get('kv_tags', None)
self.state = kwargs.get('state', None)
self.error = kwargs.get('error', None)
self.compute_type = kwargs.get('compute_type', None)
self.image_id = kwargs.get('image_id', None)
self.cpu = kwargs.get('cpu', None)
self.memory_in_gb = kwargs.get('memory_in_gb', None)
self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
self.num_replicas = kwargs.get('num_replicas', None)
self.event_hub_enabled = kwargs.get('event_hub_enabled', None)
self.storage_enabled = kwargs.get('storage_enabled', None)
self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
self.auto_scale_enabled = kwargs.get('auto_scale_enabled', None)
self.min_replicas = kwargs.get('min_replicas', None)
self.max_replicas = kwargs.get('max_replicas', None)
self.target_utilization = kwargs.get('target_utilization', None)
self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
self.scoring_uri = kwargs.get('scoring_uri', None)
self.deployment_status = kwargs.get('deployment_status', None)
self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
self.auth_enabled = kwargs.get('auth_enabled', None)
self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
self.region = kwargs.get('region', None)
self.primary_key = kwargs.get('primary_key', None)
self.secondary_key = kwargs.get('secondary_key', None)
self.swagger_uri = kwargs.get('swagger_uri', None)
self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)
self.linked_pipeline_run_id = kwargs.get('linked_pipeline_run_id', None)
self.warning = kwargs.get('warning', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.id = kwargs.get('id', None)
self.created_time = kwargs.get('created_time', None)
self.updated_time = kwargs.get('updated_time', None)
self.compute_name = kwargs.get('compute_name', None)
        self.updated_by = kwargs.get('updated_by', None)


class RealTimeEndpointInfo(msrest.serialization.Model):
"""RealTimeEndpointInfo.
:ivar web_service_inputs:
:vartype web_service_inputs: list[~flow.models.WebServicePort]
:ivar web_service_outputs:
:vartype web_service_outputs: list[~flow.models.WebServicePort]
:ivar deployments_info:
:vartype deployments_info: list[~flow.models.DeploymentInfo]
"""
_attribute_map = {
'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
'deployments_info': {'key': 'deploymentsInfo', 'type': '[DeploymentInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword web_service_inputs:
:paramtype web_service_inputs: list[~flow.models.WebServicePort]
:keyword web_service_outputs:
:paramtype web_service_outputs: list[~flow.models.WebServicePort]
:keyword deployments_info:
:paramtype deployments_info: list[~flow.models.DeploymentInfo]
"""
super(RealTimeEndpointInfo, self).__init__(**kwargs)
self.web_service_inputs = kwargs.get('web_service_inputs', None)
self.web_service_outputs = kwargs.get('web_service_outputs', None)
        self.deployments_info = kwargs.get('deployments_info', None)


class RealTimeEndpointStatus(msrest.serialization.Model):
"""RealTimeEndpointStatus.
:ivar last_operation: Possible values include: "Create", "Update", "Delete".
:vartype last_operation: str or ~flow.models.RealTimeEndpointOpCode
:ivar last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
"SucceededWithWarning".
:vartype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
:ivar internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
"FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
"DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
:vartype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
:ivar status_detail:
:vartype status_detail: str
:ivar deployment_state:
:vartype deployment_state: str
:ivar service_id:
:vartype service_id: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
"""
_attribute_map = {
'last_operation': {'key': 'lastOperation', 'type': 'str'},
'last_operation_status': {'key': 'lastOperationStatus', 'type': 'str'},
'internal_step': {'key': 'internalStep', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'deployment_state': {'key': 'deploymentState', 'type': 'str'},
'service_id': {'key': 'serviceId', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword last_operation: Possible values include: "Create", "Update", "Delete".
:paramtype last_operation: str or ~flow.models.RealTimeEndpointOpCode
:keyword last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
"SucceededWithWarning".
:paramtype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
:keyword internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
"FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
"DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
:paramtype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
:keyword status_detail:
:paramtype status_detail: str
:keyword deployment_state:
:paramtype deployment_state: str
:keyword service_id:
:paramtype service_id: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
"""
super(RealTimeEndpointStatus, self).__init__(**kwargs)
self.last_operation = kwargs.get('last_operation', None)
self.last_operation_status = kwargs.get('last_operation_status', None)
self.internal_step = kwargs.get('internal_step', None)
self.status_detail = kwargs.get('status_detail', None)
self.deployment_state = kwargs.get('deployment_state', None)
self.service_id = kwargs.get('service_id', None)
        self.linked_pipeline_draft_id = kwargs.get('linked_pipeline_draft_id', None)


class RealTimeEndpointSummary(msrest.serialization.Model):
"""RealTimeEndpointSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar id:
:vartype id: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar updated_time:
:vartype updated_time: ~datetime.datetime
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar compute_name:
:vartype compute_name: str
:ivar updated_by:
:vartype updated_by: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword id:
:paramtype id: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword updated_time:
:paramtype updated_time: ~datetime.datetime
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword compute_name:
:paramtype compute_name: str
:keyword updated_by:
:paramtype updated_by: str
"""
super(RealTimeEndpointSummary, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.id = kwargs.get('id', None)
self.created_time = kwargs.get('created_time', None)
self.updated_time = kwargs.get('updated_time', None)
self.compute_type = kwargs.get('compute_type', None)
self.compute_name = kwargs.get('compute_name', None)
        self.updated_by = kwargs.get('updated_by', None)


class RealTimeEndpointTestRequest(msrest.serialization.Model):
"""RealTimeEndpointTestRequest.
:ivar end_point:
:vartype end_point: str
:ivar auth_key:
:vartype auth_key: str
:ivar payload:
:vartype payload: str
"""
_attribute_map = {
'end_point': {'key': 'endPoint', 'type': 'str'},
'auth_key': {'key': 'authKey', 'type': 'str'},
'payload': {'key': 'payload', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword end_point:
:paramtype end_point: str
:keyword auth_key:
:paramtype auth_key: str
:keyword payload:
:paramtype payload: str
"""
super(RealTimeEndpointTestRequest, self).__init__(**kwargs)
self.end_point = kwargs.get('end_point', None)
self.auth_key = kwargs.get('auth_key', None)
self.payload = kwargs.get('payload', None)
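
# Illustrative sketch (not part of the generated code): msrest models serialize
# through ``_attribute_map``, so snake_case attributes map onto camelCase wire
# keys. The endpoint URL, key placeholder, and payload below are hypothetical.
#
#     req = RealTimeEndpointTestRequest(
#         end_point="https://example.invalid/score",
#         auth_key="<auth-key>",
#         payload='{"data": []}',
#     )
#     assert req.serialize()["endPoint"] == "https://example.invalid/score"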
class Recurrence(msrest.serialization.Model):
"""Recurrence.
:ivar frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
:vartype frequency: str or ~flow.models.Frequency
:ivar interval:
:vartype interval: int
:ivar schedule:
:vartype schedule: ~flow.models.RecurrenceSchedule
:ivar end_time:
:vartype end_time: str
:ivar start_time:
:vartype start_time: str
:ivar time_zone:
:vartype time_zone: str
"""
_attribute_map = {
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
'end_time': {'key': 'endTime', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
:paramtype frequency: str or ~flow.models.Frequency
:keyword interval:
:paramtype interval: int
:keyword schedule:
:paramtype schedule: ~flow.models.RecurrenceSchedule
:keyword end_time:
:paramtype end_time: str
:keyword start_time:
:paramtype start_time: str
:keyword time_zone:
:paramtype time_zone: str
"""
super(Recurrence, self).__init__(**kwargs)
self.frequency = kwargs.get('frequency', None)
self.interval = kwargs.get('interval', None)
self.schedule = kwargs.get('schedule', None)
self.end_time = kwargs.get('end_time', None)
self.start_time = kwargs.get('start_time', None)
        self.time_zone = kwargs.get('time_zone', None)


class RecurrencePattern(msrest.serialization.Model):
"""RecurrencePattern.
:ivar hours:
:vartype hours: list[int]
:ivar minutes:
:vartype minutes: list[int]
:ivar weekdays:
:vartype weekdays: list[str or ~flow.models.Weekday]
"""
_attribute_map = {
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'weekdays': {'key': 'weekdays', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hours:
:paramtype hours: list[int]
:keyword minutes:
:paramtype minutes: list[int]
:keyword weekdays:
:paramtype weekdays: list[str or ~flow.models.Weekday]
"""
super(RecurrencePattern, self).__init__(**kwargs)
self.hours = kwargs.get('hours', None)
self.minutes = kwargs.get('minutes', None)
        self.weekdays = kwargs.get('weekdays', None)


class RecurrenceSchedule(msrest.serialization.Model):
"""RecurrenceSchedule.
:ivar hours:
:vartype hours: list[int]
:ivar minutes:
:vartype minutes: list[int]
:ivar week_days:
:vartype week_days: list[str or ~flow.models.WeekDays]
:ivar month_days:
:vartype month_days: list[int]
"""
_attribute_map = {
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'week_days': {'key': 'weekDays', 'type': '[str]'},
'month_days': {'key': 'monthDays', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword hours:
:paramtype hours: list[int]
:keyword minutes:
:paramtype minutes: list[int]
:keyword week_days:
:paramtype week_days: list[str or ~flow.models.WeekDays]
:keyword month_days:
:paramtype month_days: list[int]
"""
super(RecurrenceSchedule, self).__init__(**kwargs)
self.hours = kwargs.get('hours', None)
self.minutes = kwargs.get('minutes', None)
self.week_days = kwargs.get('week_days', None)
self.month_days = kwargs.get('month_days', None)
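
# Illustrative sketch (not part of the generated code): a weekly recurrence firing
# Mondays at 09:00 UTC. "Week" is one of the documented Frequency values; the
# "Monday" weekday string and the timestamp format are assumptions, since
# ``week_days``, ``start_time``, and ``time_zone`` are only typed as strings here.
#
#     schedule = RecurrenceSchedule(hours=[9], minutes=[0], week_days=["Monday"])
#     recurrence = Recurrence(
#         frequency="Week",
#         interval=1,
#         schedule=schedule,
#         start_time="2024-01-01T00:00:00",  # hypothetical start
#         time_zone="UTC",
#     )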
class RegenerateServiceKeysRequest(msrest.serialization.Model):
"""RegenerateServiceKeysRequest.
:ivar key_type: Possible values include: "Primary", "Secondary".
:vartype key_type: str or ~flow.models.KeyType
:ivar key_value:
:vartype key_value: str
"""
_attribute_map = {
'key_type': {'key': 'keyType', 'type': 'str'},
'key_value': {'key': 'keyValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword key_type: Possible values include: "Primary", "Secondary".
:paramtype key_type: str or ~flow.models.KeyType
:keyword key_value:
:paramtype key_value: str
"""
super(RegenerateServiceKeysRequest, self).__init__(**kwargs)
self.key_type = kwargs.get('key_type', None)
self.key_value = kwargs.get('key_value', None)
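
# Illustrative sketch (not part of the generated code): asking the service to
# regenerate the primary key. "Primary" is one of the documented KeyType values;
# omitting ``key_value`` and letting the service choose the new key is an
# assumption about server-side behavior.
#
#     regen = RegenerateServiceKeysRequest(key_type="Primary")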
class RegisterComponentMetaInfo(msrest.serialization.Model):
"""RegisterComponentMetaInfo.
:ivar aml_module_name:
:vartype aml_module_name: str
:ivar name_only_display_info:
:vartype name_only_display_info: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar module_version_id:
:vartype module_version_id: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:ivar module_entity_from_yaml:
:vartype module_entity_from_yaml: ~flow.models.ModuleEntity
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar data_types_from_yaml:
:vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
:ivar content_hash:
:vartype content_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
:ivar registration:
:vartype registration: bool
:ivar validate_only:
:vartype validate_only: bool
:ivar skip_workspace_related_check:
:vartype skip_workspace_related_check: bool
:ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
:vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:ivar system_managed_registration:
:vartype system_managed_registration: bool
:ivar allow_dup_name_between_input_and_ouput_port:
:vartype allow_dup_name_between_input_and_ouput_port: bool
:ivar module_source:
:vartype module_source: str
:ivar module_scope:
:vartype module_scope: str
:ivar module_additional_includes_count:
:vartype module_additional_includes_count: int
:ivar module_os_type:
:vartype module_os_type: str
:ivar module_codegen_by:
:vartype module_codegen_by: str
:ivar module_client_source:
:vartype module_client_source: str
:ivar module_is_builtin:
:vartype module_is_builtin: bool
:ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
:vartype module_register_event_extension_fields: dict[str, str]
"""
_attribute_map = {
'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterComponentMetaInfoIdentifierHashes'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterComponentMetaInfoExtraHashes'},
'registration': {'key': 'registration', 'type': 'bool'},
'validate_only': {'key': 'validateOnly', 'type': 'bool'},
'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
'module_source': {'key': 'moduleSource', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword aml_module_name:
:paramtype aml_module_name: str
:keyword name_only_display_info:
:paramtype name_only_display_info: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword module_version_id:
:paramtype module_version_id: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:keyword module_entity_from_yaml:
:paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword data_types_from_yaml:
:paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
:keyword content_hash:
:paramtype content_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
:keyword registration:
:paramtype registration: bool
:keyword validate_only:
:paramtype validate_only: bool
:keyword skip_workspace_related_check:
:paramtype skip_workspace_related_check: bool
:keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
:paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:keyword system_managed_registration:
:paramtype system_managed_registration: bool
:keyword allow_dup_name_between_input_and_ouput_port:
:paramtype allow_dup_name_between_input_and_ouput_port: bool
:keyword module_source:
:paramtype module_source: str
:keyword module_scope:
:paramtype module_scope: str
:keyword module_additional_includes_count:
:paramtype module_additional_includes_count: int
:keyword module_os_type:
:paramtype module_os_type: str
:keyword module_codegen_by:
:paramtype module_codegen_by: str
:keyword module_client_source:
:paramtype module_client_source: str
:keyword module_is_builtin:
:paramtype module_is_builtin: bool
:keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
:paramtype module_register_event_extension_fields: dict[str, str]
"""
super(RegisterComponentMetaInfo, self).__init__(**kwargs)
self.aml_module_name = kwargs.get('aml_module_name', None)
self.name_only_display_info = kwargs.get('name_only_display_info', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.component_registration_type = kwargs.get('component_registration_type', None)
self.module_entity_from_yaml = kwargs.get('module_entity_from_yaml', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.data_types_from_yaml = kwargs.get('data_types_from_yaml', None)
self.data_type_mechanism = kwargs.get('data_type_mechanism', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hashes = kwargs.get('identifier_hashes', None)
self.content_hash = kwargs.get('content_hash', None)
self.extra_hash = kwargs.get('extra_hash', None)
self.extra_hashes = kwargs.get('extra_hashes', None)
self.registration = kwargs.get('registration', None)
self.validate_only = kwargs.get('validate_only', None)
self.skip_workspace_related_check = kwargs.get('skip_workspace_related_check', None)
self.intellectual_property_protected_workspace_component_registration_allowed_publisher = kwargs.get('intellectual_property_protected_workspace_component_registration_allowed_publisher', None)
self.system_managed_registration = kwargs.get('system_managed_registration', None)
self.allow_dup_name_between_input_and_ouput_port = kwargs.get('allow_dup_name_between_input_and_ouput_port', None)
self.module_source = kwargs.get('module_source', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_additional_includes_count = kwargs.get('module_additional_includes_count', None)
self.module_os_type = kwargs.get('module_os_type', None)
self.module_codegen_by = kwargs.get('module_codegen_by', None)
self.module_client_source = kwargs.get('module_client_source', None)
self.module_is_builtin = kwargs.get('module_is_builtin', None)
self.module_register_event_extension_fields = kwargs.get('module_register_event_extension_fields', None)
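
# Illustrative sketch (not part of the generated code): a minimal validation-only
# registration. "AnonymousAmlModule" is one of the documented registration types;
# the component name and version are hypothetical.
#
#     meta = RegisterComponentMetaInfo(
#         name="my_component",
#         version="0.0.1",
#         component_registration_type="AnonymousAmlModule",
#         validate_only=True,
#     )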
class RegisterComponentMetaInfoExtraHashes(msrest.serialization.Model):
"""RegisterComponentMetaInfoExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterComponentMetaInfoExtraHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)


class RegisterComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
"""RegisterComponentMetaInfoIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)


class RegisteredDataSetReference(msrest.serialization.Model):
"""RegisteredDataSetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(RegisteredDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
        self.version = kwargs.get('version', None)


class RegisterRegistryComponentMetaInfo(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfo.
:ivar registry_name:
:vartype registry_name: str
:ivar intellectual_property_publisher_information:
:vartype intellectual_property_publisher_information:
~flow.models.IntellectualPropertyPublisherInformation
:ivar blob_reference_data: This is a dictionary.
:vartype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
:ivar aml_module_name:
:vartype aml_module_name: str
:ivar name_only_display_info:
:vartype name_only_display_info: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar module_version_id:
:vartype module_version_id: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:ivar module_entity_from_yaml:
:vartype module_entity_from_yaml: ~flow.models.ModuleEntity
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar data_types_from_yaml:
:vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
:ivar content_hash:
:vartype content_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
:ivar registration:
:vartype registration: bool
:ivar validate_only:
:vartype validate_only: bool
:ivar skip_workspace_related_check:
:vartype skip_workspace_related_check: bool
:ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
:vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:ivar system_managed_registration:
:vartype system_managed_registration: bool
:ivar allow_dup_name_between_input_and_ouput_port:
:vartype allow_dup_name_between_input_and_ouput_port: bool
:ivar module_source:
:vartype module_source: str
:ivar module_scope:
:vartype module_scope: str
:ivar module_additional_includes_count:
:vartype module_additional_includes_count: int
:ivar module_os_type:
:vartype module_os_type: str
:ivar module_codegen_by:
:vartype module_codegen_by: str
:ivar module_client_source:
:vartype module_client_source: str
:ivar module_is_builtin:
:vartype module_is_builtin: bool
:ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
:vartype module_register_event_extension_fields: dict[str, str]
"""
_attribute_map = {
'registry_name': {'key': 'registryName', 'type': 'str'},
'intellectual_property_publisher_information': {'key': 'intellectualPropertyPublisherInformation', 'type': 'IntellectualPropertyPublisherInformation'},
'blob_reference_data': {'key': 'blobReferenceData', 'type': '{RegistryBlobReferenceData}'},
'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterRegistryComponentMetaInfoIdentifierHashes'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterRegistryComponentMetaInfoExtraHashes'},
'registration': {'key': 'registration', 'type': 'bool'},
'validate_only': {'key': 'validateOnly', 'type': 'bool'},
'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
'module_source': {'key': 'moduleSource', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registry_name:
:paramtype registry_name: str
:keyword intellectual_property_publisher_information:
:paramtype intellectual_property_publisher_information:
~flow.models.IntellectualPropertyPublisherInformation
:keyword blob_reference_data: This is a dictionary.
:paramtype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
:keyword aml_module_name:
:paramtype aml_module_name: str
:keyword name_only_display_info:
:paramtype name_only_display_info: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword module_version_id:
:paramtype module_version_id: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:keyword module_entity_from_yaml:
:paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword data_types_from_yaml:
:paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
:keyword content_hash:
:paramtype content_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
:keyword registration:
:paramtype registration: bool
:keyword validate_only:
:paramtype validate_only: bool
:keyword skip_workspace_related_check:
:paramtype skip_workspace_related_check: bool
:keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
:paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:keyword system_managed_registration:
:paramtype system_managed_registration: bool
:keyword allow_dup_name_between_input_and_ouput_port:
:paramtype allow_dup_name_between_input_and_ouput_port: bool
:keyword module_source:
:paramtype module_source: str
:keyword module_scope:
:paramtype module_scope: str
:keyword module_additional_includes_count:
:paramtype module_additional_includes_count: int
:keyword module_os_type:
:paramtype module_os_type: str
:keyword module_codegen_by:
:paramtype module_codegen_by: str
:keyword module_client_source:
:paramtype module_client_source: str
:keyword module_is_builtin:
:paramtype module_is_builtin: bool
:keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
:paramtype module_register_event_extension_fields: dict[str, str]
"""
super(RegisterRegistryComponentMetaInfo, self).__init__(**kwargs)
self.registry_name = kwargs.get('registry_name', None)
self.intellectual_property_publisher_information = kwargs.get('intellectual_property_publisher_information', None)
self.blob_reference_data = kwargs.get('blob_reference_data', None)
self.aml_module_name = kwargs.get('aml_module_name', None)
self.name_only_display_info = kwargs.get('name_only_display_info', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.module_version_id = kwargs.get('module_version_id', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.component_registration_type = kwargs.get('component_registration_type', None)
self.module_entity_from_yaml = kwargs.get('module_entity_from_yaml', None)
self.set_as_default_version = kwargs.get('set_as_default_version', None)
self.data_types_from_yaml = kwargs.get('data_types_from_yaml', None)
self.data_type_mechanism = kwargs.get('data_type_mechanism', None)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hashes = kwargs.get('identifier_hashes', None)
self.content_hash = kwargs.get('content_hash', None)
self.extra_hash = kwargs.get('extra_hash', None)
self.extra_hashes = kwargs.get('extra_hashes', None)
self.registration = kwargs.get('registration', None)
self.validate_only = kwargs.get('validate_only', None)
self.skip_workspace_related_check = kwargs.get('skip_workspace_related_check', None)
self.intellectual_property_protected_workspace_component_registration_allowed_publisher = kwargs.get('intellectual_property_protected_workspace_component_registration_allowed_publisher', None)
self.system_managed_registration = kwargs.get('system_managed_registration', None)
self.allow_dup_name_between_input_and_ouput_port = kwargs.get('allow_dup_name_between_input_and_ouput_port', None)
self.module_source = kwargs.get('module_source', None)
self.module_scope = kwargs.get('module_scope', None)
self.module_additional_includes_count = kwargs.get('module_additional_includes_count', None)
self.module_os_type = kwargs.get('module_os_type', None)
self.module_codegen_by = kwargs.get('module_codegen_by', None)
self.module_client_source = kwargs.get('module_client_source', None)
self.module_is_builtin = kwargs.get('module_is_builtin', None)
        self.module_register_event_extension_fields = kwargs.get('module_register_event_extension_fields', None)


class RegisterRegistryComponentMetaInfoExtraHashes(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfoExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterRegistryComponentMetaInfoExtraHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)


class RegisterRegistryComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfoIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterRegistryComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
        self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)


class RegistrationOptions(msrest.serialization.Model):
"""RegistrationOptions.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar dataset_registration_options:
:vartype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'dataset_registration_options': {'key': 'datasetRegistrationOptions', 'type': 'DatasetRegistrationOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword dataset_registration_options:
:paramtype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
"""
super(RegistrationOptions, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.dataset_registration_options = kwargs.get('dataset_registration_options', None)
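
# Illustrative sketch (not part of the generated code): registration options with
# free-form tags; every value below is hypothetical.
#
#     options = RegistrationOptions(
#         name="my_dataset",
#         version="1",
#         description="Example registration",
#         tags={"team": "example"},
#     )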
class RegistryBlobReferenceData(msrest.serialization.Model):
"""RegistryBlobReferenceData.
:ivar data_reference_id:
:vartype data_reference_id: str
:ivar data:
:vartype data: str
"""
_attribute_map = {
'data_reference_id': {'key': 'dataReferenceId', 'type': 'str'},
'data': {'key': 'data', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_reference_id:
:paramtype data_reference_id: str
:keyword data:
:paramtype data: str
"""
super(RegistryBlobReferenceData, self).__init__(**kwargs)
self.data_reference_id = kwargs.get('data_reference_id', None)
        self.data = kwargs.get('data', None)


class RegistryIdentity(msrest.serialization.Model):
"""RegistryIdentity.
:ivar resource_id:
:vartype resource_id: str
:ivar client_id:
:vartype client_id: str
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword resource_id:
:paramtype resource_id: str
:keyword client_id:
:paramtype client_id: str
"""
super(RegistryIdentity, self).__init__(**kwargs)
self.resource_id = kwargs.get('resource_id', None)
        self.client_id = kwargs.get('client_id', None)


class Relationship(msrest.serialization.Model):
"""Relationship.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar relation_type:
:vartype relation_type: str
:ivar target_entity_id:
:vartype target_entity_id: str
:ivar asset_id:
:vartype asset_id: str
:ivar entity_type:
:vartype entity_type: str
:ivar direction:
:vartype direction: str
:ivar entity_container_id:
:vartype entity_container_id: str
"""
_validation = {
'entity_type': {'readonly': True},
'entity_container_id': {'readonly': True},
}
_attribute_map = {
'relation_type': {'key': 'relationType', 'type': 'str'},
'target_entity_id': {'key': 'targetEntityId', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'direction': {'key': 'direction', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword relation_type:
:paramtype relation_type: str
:keyword target_entity_id:
:paramtype target_entity_id: str
:keyword asset_id:
:paramtype asset_id: str
:keyword direction:
:paramtype direction: str
"""
super(Relationship, self).__init__(**kwargs)
self.relation_type = kwargs.get('relation_type', None)
self.target_entity_id = kwargs.get('target_entity_id', None)
self.asset_id = kwargs.get('asset_id', None)
self.entity_type = None
self.direction = kwargs.get('direction', None)
        self.entity_container_id = None


class RemoteDockerComputeInfo(msrest.serialization.Model):
"""RemoteDockerComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(RemoteDockerComputeInfo, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.private_key = kwargs.get('private_key', None)
class ResourceConfig(msrest.serialization.Model):
"""ResourceConfig.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(ResourceConfig, self).__init__(**kwargs)
self.gpu_count = kwargs.get('gpu_count', None)
self.cpu_count = kwargs.get('cpu_count', None)
self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)
class ResourceConfiguration(msrest.serialization.Model):
"""ResourceConfiguration.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(ResourceConfiguration, self).__init__(**kwargs)
self.gpu_count = kwargs.get('gpu_count', None)
self.cpu_count = kwargs.get('cpu_count', None)
self.memory_request_in_gb = kwargs.get('memory_request_in_gb', None)
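# Note: ResourceConfig and ResourceConfiguration serialize to the same wire
# shape (gpuCount/cpuCount/memoryRequestInGB); they exist as separate generated
# types because the service schema declares them separately. A minimal
# construction sketch with hypothetical values:
#
#   resources = ResourceConfiguration(gpu_count=1, cpu_count=4, memory_request_in_gb=16)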
class ResourcesSetting(msrest.serialization.Model):
"""ResourcesSetting.
:ivar instance_size:
:vartype instance_size: str
:ivar spark_version:
:vartype spark_version: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword spark_version:
:paramtype spark_version: str
"""
super(ResourcesSetting, self).__init__(**kwargs)
self.instance_size = kwargs.get('instance_size', None)
self.spark_version = kwargs.get('spark_version', None)
class RetrieveToolFuncResultRequest(msrest.serialization.Model):
"""RetrieveToolFuncResultRequest.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs: This is a dictionary.
:vartype func_kwargs: dict[str, any]
:ivar func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
"dynamic_list".
:vartype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
'func_call_scenario': {'key': 'func_call_scenario', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs: This is a dictionary.
:paramtype func_kwargs: dict[str, any]
:keyword func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
"dynamic_list".
:paramtype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
"""
super(RetrieveToolFuncResultRequest, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
self.func_call_scenario = kwargs.get('func_call_scenario', None)
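# Usage sketch (hypothetical values; treat this purely as an illustration of
# the keyword-argument constructor, since this file is auto-generated):
#
#   request = RetrieveToolFuncResultRequest(
#       func_path="my_package.tools.list_options",  # hypothetical function path
#       func_kwargs={"prefix": "gpt"},              # arbitrary JSON-serializable dict
#       func_call_scenario="dynamic_list",          # one of the documented values
#   )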
class RetryConfiguration(msrest.serialization.Model):
"""RetryConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
"""
super(RetryConfiguration, self).__init__(**kwargs)
self.max_retry_count = kwargs.get('max_retry_count', None)
class RGitHubPackage(msrest.serialization.Model):
"""RGitHubPackage.
:ivar repository:
:vartype repository: str
:ivar auth_token:
:vartype auth_token: str
"""
_attribute_map = {
'repository': {'key': 'repository', 'type': 'str'},
'auth_token': {'key': 'authToken', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword repository:
:paramtype repository: str
:keyword auth_token:
:paramtype auth_token: str
"""
super(RGitHubPackage, self).__init__(**kwargs)
self.repository = kwargs.get('repository', None)
self.auth_token = kwargs.get('auth_token', None)
class RootError(msrest.serialization.Model):
"""The root error.
:ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
ValidationError, AzureStorageError, TransientError, RequestThrottled.
:vartype code: str
    :ivar severity: The severity of the error.
:vartype severity: int
:ivar message: A human-readable representation of the error.
:vartype message: str
:ivar message_format: An unformatted version of the message with no variable substitution.
:vartype message_format: str
:ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
:vartype message_parameters: dict[str, str]
:ivar reference_code: This code can optionally be set by the system generating the error.
It should be used to classify the problem and identify the module and code area where the
     failure occurred.
:vartype reference_code: str
:ivar details_uri: A URI which points to more details about the context of the error.
:vartype details_uri: str
:ivar target: The target of the error (e.g., the name of the property in error).
:vartype target: str
:ivar details: The related errors that occurred during the request.
:vartype details: list[~flow.models.RootError]
:ivar inner_error: A nested structure of errors.
:vartype inner_error: ~flow.models.InnerErrorResponse
:ivar additional_info: The error additional info.
:vartype additional_info: list[~flow.models.ErrorAdditionalInfo]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'message_format': {'key': 'messageFormat', 'type': 'str'},
'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
'reference_code': {'key': 'referenceCode', 'type': 'str'},
'details_uri': {'key': 'detailsUri', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[RootError]'},
'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword code: The service-defined error code. Supported error codes: ServiceError, UserError,
ValidationError, AzureStorageError, TransientError, RequestThrottled.
:paramtype code: str
        :keyword severity: The severity of the error.
:paramtype severity: int
:keyword message: A human-readable representation of the error.
:paramtype message: str
:keyword message_format: An unformatted version of the message with no variable substitution.
:paramtype message_format: str
:keyword message_parameters: Value substitutions corresponding to the contents of
MessageFormat.
:paramtype message_parameters: dict[str, str]
:keyword reference_code: This code can optionally be set by the system generating the error.
It should be used to classify the problem and identify the module and code area where the
         failure occurred.
:paramtype reference_code: str
:keyword details_uri: A URI which points to more details about the context of the error.
:paramtype details_uri: str
:keyword target: The target of the error (e.g., the name of the property in error).
:paramtype target: str
:keyword details: The related errors that occurred during the request.
:paramtype details: list[~flow.models.RootError]
:keyword inner_error: A nested structure of errors.
:paramtype inner_error: ~flow.models.InnerErrorResponse
:keyword additional_info: The error additional info.
:paramtype additional_info: list[~flow.models.ErrorAdditionalInfo]
"""
super(RootError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.severity = kwargs.get('severity', None)
self.message = kwargs.get('message', None)
self.message_format = kwargs.get('message_format', None)
self.message_parameters = kwargs.get('message_parameters', None)
self.reference_code = kwargs.get('reference_code', None)
self.details_uri = kwargs.get('details_uri', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.inner_error = kwargs.get('inner_error', None)
self.additional_info = kwargs.get('additional_info', None)
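# Illustrative sketch of the nested error shape (hypothetical values): the
# details list holds related RootError instances, so error trees compose
# recursively.
#
#   error = RootError(
#       code="UserError",
#       message="Flow input 'question' is missing.",
#       details=[RootError(code="ValidationError", message="Input schema mismatch.")],
#   )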
class RSection(msrest.serialization.Model):
"""RSection.
:ivar r_version:
:vartype r_version: str
:ivar user_managed:
:vartype user_managed: bool
:ivar rscript_path:
:vartype rscript_path: str
:ivar snapshot_date:
:vartype snapshot_date: str
:ivar cran_packages:
:vartype cran_packages: list[~flow.models.RCranPackage]
:ivar git_hub_packages:
:vartype git_hub_packages: list[~flow.models.RGitHubPackage]
:ivar custom_url_packages:
:vartype custom_url_packages: list[str]
:ivar bio_conductor_packages:
:vartype bio_conductor_packages: list[str]
"""
_attribute_map = {
'r_version': {'key': 'rVersion', 'type': 'str'},
'user_managed': {'key': 'userManaged', 'type': 'bool'},
'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword r_version:
:paramtype r_version: str
:keyword user_managed:
:paramtype user_managed: bool
:keyword rscript_path:
:paramtype rscript_path: str
:keyword snapshot_date:
:paramtype snapshot_date: str
:keyword cran_packages:
:paramtype cran_packages: list[~flow.models.RCranPackage]
:keyword git_hub_packages:
:paramtype git_hub_packages: list[~flow.models.RGitHubPackage]
:keyword custom_url_packages:
:paramtype custom_url_packages: list[str]
:keyword bio_conductor_packages:
:paramtype bio_conductor_packages: list[str]
"""
super(RSection, self).__init__(**kwargs)
self.r_version = kwargs.get('r_version', None)
self.user_managed = kwargs.get('user_managed', None)
self.rscript_path = kwargs.get('rscript_path', None)
self.snapshot_date = kwargs.get('snapshot_date', None)
self.cran_packages = kwargs.get('cran_packages', None)
self.git_hub_packages = kwargs.get('git_hub_packages', None)
self.custom_url_packages = kwargs.get('custom_url_packages', None)
self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)
class RunAnnotations(msrest.serialization.Model):
"""RunAnnotations.
:ivar display_name:
:vartype display_name: str
:ivar status:
:vartype status: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar estimated_cost:
:vartype estimated_cost: float
:ivar primary_metric_summary:
:vartype primary_metric_summary: ~flow.models.RunIndexMetricSummary
:ivar metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
:vartype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar settings: Dictionary of :code:`<string>`.
:vartype settings: dict[str, str]
:ivar modified_time:
:vartype modified_time: ~datetime.datetime
:ivar retain_for_lifetime_of_workspace:
:vartype retain_for_lifetime_of_workspace: bool
:ivar error:
:vartype error: ~flow.models.IndexedErrorResponse
:ivar resource_metric_summary:
:vartype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
:ivar job_cost:
:vartype job_cost: ~flow.models.JobCost
:ivar compute_duration:
:vartype compute_duration: str
:ivar compute_duration_milliseconds:
:vartype compute_duration_milliseconds: float
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar archived:
:vartype archived: bool
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'estimated_cost': {'key': 'estimatedCost', 'type': 'float'},
'primary_metric_summary': {'key': 'primaryMetricSummary', 'type': 'RunIndexMetricSummary'},
'metrics': {'key': 'metrics', 'type': '{RunIndexMetricSummarySystemObject}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'settings': {'key': 'settings', 'type': '{str}'},
'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
'error': {'key': 'error', 'type': 'IndexedErrorResponse'},
'resource_metric_summary': {'key': 'resourceMetricSummary', 'type': 'RunIndexResourceMetricSummary'},
'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'compute_duration_milliseconds': {'key': 'computeDurationMilliseconds', 'type': 'float'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'archived': {'key': 'archived', 'type': 'bool'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword status:
:paramtype status: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword estimated_cost:
:paramtype estimated_cost: float
:keyword primary_metric_summary:
:paramtype primary_metric_summary: ~flow.models.RunIndexMetricSummary
:keyword metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
:paramtype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword settings: Dictionary of :code:`<string>`.
:paramtype settings: dict[str, str]
:keyword modified_time:
:paramtype modified_time: ~datetime.datetime
:keyword retain_for_lifetime_of_workspace:
:paramtype retain_for_lifetime_of_workspace: bool
:keyword error:
:paramtype error: ~flow.models.IndexedErrorResponse
:keyword resource_metric_summary:
:paramtype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
:keyword job_cost:
:paramtype job_cost: ~flow.models.JobCost
:keyword compute_duration:
:paramtype compute_duration: str
:keyword compute_duration_milliseconds:
:paramtype compute_duration_milliseconds: float
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword archived:
:paramtype archived: bool
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(RunAnnotations, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.status = kwargs.get('status', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.estimated_cost = kwargs.get('estimated_cost', None)
self.primary_metric_summary = kwargs.get('primary_metric_summary', None)
self.metrics = kwargs.get('metrics', None)
self.parameters = kwargs.get('parameters', None)
self.settings = kwargs.get('settings', None)
self.modified_time = kwargs.get('modified_time', None)
self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
self.error = kwargs.get('error', None)
self.resource_metric_summary = kwargs.get('resource_metric_summary', None)
self.job_cost = kwargs.get('job_cost', None)
self.compute_duration = kwargs.get('compute_duration', None)
self.compute_duration_milliseconds = kwargs.get('compute_duration_milliseconds', None)
self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.archived = kwargs.get('archived', None)
self.tags = kwargs.get('tags', None)
class RunConfiguration(msrest.serialization.Model):
"""RunConfiguration.
:ivar script:
:vartype script: str
:ivar script_type: Possible values include: "Python", "Notebook".
:vartype script_type: str or ~flow.models.ScriptType
:ivar command:
:vartype command: str
:ivar use_absolute_path:
:vartype use_absolute_path: bool
:ivar arguments:
:vartype arguments: list[str]
:ivar framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow", "PyTorch",
"PySparkInteractive", "R".
:vartype framework: str or ~flow.models.Framework
:ivar communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi", "Nccl",
"ParallelTask".
:vartype communicator: str or ~flow.models.Communicator
:ivar target:
:vartype target: str
:ivar auto_cluster_compute_specification:
:vartype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
:ivar data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
:vartype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
:ivar data: Dictionary of :code:`<Data>`.
:vartype data: dict[str, ~flow.models.Data]
:ivar input_assets: Dictionary of :code:`<InputAsset>`.
:vartype input_assets: dict[str, ~flow.models.InputAsset]
:ivar output_data: Dictionary of :code:`<OutputData>`.
:vartype output_data: dict[str, ~flow.models.OutputData]
:ivar datacaches:
:vartype datacaches: list[~flow.models.DatacacheConfiguration]
:ivar job_name:
:vartype job_name: str
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: long
:ivar node_count:
:vartype node_count: int
:ivar max_node_count:
:vartype max_node_count: int
:ivar instance_types:
:vartype instance_types: list[str]
:ivar priority:
:vartype priority: int
:ivar credential_passthrough:
:vartype credential_passthrough: bool
:ivar identity:
:vartype identity: ~flow.models.IdentityConfiguration
:ivar environment:
:vartype environment: ~flow.models.EnvironmentDefinition
:ivar history:
:vartype history: ~flow.models.HistoryConfiguration
:ivar spark:
:vartype spark: ~flow.models.SparkConfiguration
:ivar parallel_task:
:vartype parallel_task: ~flow.models.ParallelTaskConfiguration
:ivar tensorflow:
:vartype tensorflow: ~flow.models.TensorflowConfiguration
:ivar mpi:
:vartype mpi: ~flow.models.MpiConfiguration
:ivar py_torch:
:vartype py_torch: ~flow.models.PyTorchConfiguration
:ivar ray:
:vartype ray: ~flow.models.RayConfiguration
:ivar hdi:
:vartype hdi: ~flow.models.HdiConfiguration
:ivar docker:
:vartype docker: ~flow.models.DockerConfiguration
:ivar command_return_code_config:
:vartype command_return_code_config: ~flow.models.CommandReturnCodeConfig
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar parameters:
:vartype parameters: list[~flow.models.ParameterDefinition]
:ivar autologger_settings:
:vartype autologger_settings: ~flow.models.AutologgerSettings
:ivar data_bricks:
:vartype data_bricks: ~flow.models.DatabricksConfiguration
:ivar training_diagnostic_config:
:vartype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
:ivar secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
"""
_attribute_map = {
'script': {'key': 'script', 'type': 'str'},
'script_type': {'key': 'scriptType', 'type': 'str'},
'command': {'key': 'command', 'type': 'str'},
'use_absolute_path': {'key': 'useAbsolutePath', 'type': 'bool'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'framework': {'key': 'framework', 'type': 'str'},
'communicator': {'key': 'communicator', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'auto_cluster_compute_specification': {'key': 'autoClusterComputeSpecification', 'type': 'AutoClusterComputeSpecification'},
'data_references': {'key': 'dataReferences', 'type': '{DataReferenceConfiguration}'},
'data': {'key': 'data', 'type': '{Data}'},
'input_assets': {'key': 'inputAssets', 'type': '{InputAsset}'},
'output_data': {'key': 'outputData', 'type': '{OutputData}'},
'datacaches': {'key': 'datacaches', 'type': '[DatacacheConfiguration]'},
'job_name': {'key': 'jobName', 'type': 'str'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'},
'node_count': {'key': 'nodeCount', 'type': 'int'},
'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'priority': {'key': 'priority', 'type': 'int'},
'credential_passthrough': {'key': 'credentialPassthrough', 'type': 'bool'},
'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
'environment': {'key': 'environment', 'type': 'EnvironmentDefinition'},
'history': {'key': 'history', 'type': 'HistoryConfiguration'},
'spark': {'key': 'spark', 'type': 'SparkConfiguration'},
'parallel_task': {'key': 'parallelTask', 'type': 'ParallelTaskConfiguration'},
'tensorflow': {'key': 'tensorflow', 'type': 'TensorflowConfiguration'},
'mpi': {'key': 'mpi', 'type': 'MpiConfiguration'},
'py_torch': {'key': 'pyTorch', 'type': 'PyTorchConfiguration'},
'ray': {'key': 'ray', 'type': 'RayConfiguration'},
'hdi': {'key': 'hdi', 'type': 'HdiConfiguration'},
'docker': {'key': 'docker', 'type': 'DockerConfiguration'},
'command_return_code_config': {'key': 'commandReturnCodeConfig', 'type': 'CommandReturnCodeConfig'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'parameters': {'key': 'parameters', 'type': '[ParameterDefinition]'},
'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'},
'data_bricks': {'key': 'dataBricks', 'type': 'DatabricksConfiguration'},
'training_diagnostic_config': {'key': 'trainingDiagnosticConfig', 'type': 'TrainingDiagnosticConfiguration'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword script:
:paramtype script: str
:keyword script_type: Possible values include: "Python", "Notebook".
:paramtype script_type: str or ~flow.models.ScriptType
:keyword command:
:paramtype command: str
:keyword use_absolute_path:
:paramtype use_absolute_path: bool
:keyword arguments:
:paramtype arguments: list[str]
:keyword framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow",
"PyTorch", "PySparkInteractive", "R".
:paramtype framework: str or ~flow.models.Framework
:keyword communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi",
"Nccl", "ParallelTask".
:paramtype communicator: str or ~flow.models.Communicator
:keyword target:
:paramtype target: str
:keyword auto_cluster_compute_specification:
:paramtype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
:keyword data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
:paramtype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
:keyword data: Dictionary of :code:`<Data>`.
:paramtype data: dict[str, ~flow.models.Data]
:keyword input_assets: Dictionary of :code:`<InputAsset>`.
:paramtype input_assets: dict[str, ~flow.models.InputAsset]
:keyword output_data: Dictionary of :code:`<OutputData>`.
:paramtype output_data: dict[str, ~flow.models.OutputData]
:keyword datacaches:
:paramtype datacaches: list[~flow.models.DatacacheConfiguration]
:keyword job_name:
:paramtype job_name: str
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: long
:keyword node_count:
:paramtype node_count: int
:keyword max_node_count:
:paramtype max_node_count: int
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword priority:
:paramtype priority: int
:keyword credential_passthrough:
:paramtype credential_passthrough: bool
:keyword identity:
:paramtype identity: ~flow.models.IdentityConfiguration
:keyword environment:
:paramtype environment: ~flow.models.EnvironmentDefinition
:keyword history:
:paramtype history: ~flow.models.HistoryConfiguration
:keyword spark:
:paramtype spark: ~flow.models.SparkConfiguration
:keyword parallel_task:
:paramtype parallel_task: ~flow.models.ParallelTaskConfiguration
:keyword tensorflow:
:paramtype tensorflow: ~flow.models.TensorflowConfiguration
:keyword mpi:
:paramtype mpi: ~flow.models.MpiConfiguration
:keyword py_torch:
:paramtype py_torch: ~flow.models.PyTorchConfiguration
:keyword ray:
:paramtype ray: ~flow.models.RayConfiguration
:keyword hdi:
:paramtype hdi: ~flow.models.HdiConfiguration
:keyword docker:
:paramtype docker: ~flow.models.DockerConfiguration
:keyword command_return_code_config:
:paramtype command_return_code_config: ~flow.models.CommandReturnCodeConfig
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword parameters:
:paramtype parameters: list[~flow.models.ParameterDefinition]
:keyword autologger_settings:
:paramtype autologger_settings: ~flow.models.AutologgerSettings
:keyword data_bricks:
:paramtype data_bricks: ~flow.models.DatabricksConfiguration
:keyword training_diagnostic_config:
:paramtype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
:keyword secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
"""
super(RunConfiguration, self).__init__(**kwargs)
self.script = kwargs.get('script', None)
self.script_type = kwargs.get('script_type', None)
self.command = kwargs.get('command', None)
self.use_absolute_path = kwargs.get('use_absolute_path', None)
self.arguments = kwargs.get('arguments', None)
self.framework = kwargs.get('framework', None)
self.communicator = kwargs.get('communicator', None)
self.target = kwargs.get('target', None)
self.auto_cluster_compute_specification = kwargs.get('auto_cluster_compute_specification', None)
self.data_references = kwargs.get('data_references', None)
self.data = kwargs.get('data', None)
self.input_assets = kwargs.get('input_assets', None)
self.output_data = kwargs.get('output_data', None)
self.datacaches = kwargs.get('datacaches', None)
self.job_name = kwargs.get('job_name', None)
self.max_run_duration_seconds = kwargs.get('max_run_duration_seconds', None)
self.node_count = kwargs.get('node_count', None)
self.max_node_count = kwargs.get('max_node_count', None)
self.instance_types = kwargs.get('instance_types', None)
self.priority = kwargs.get('priority', None)
self.credential_passthrough = kwargs.get('credential_passthrough', None)
self.identity = kwargs.get('identity', None)
self.environment = kwargs.get('environment', None)
self.history = kwargs.get('history', None)
self.spark = kwargs.get('spark', None)
self.parallel_task = kwargs.get('parallel_task', None)
self.tensorflow = kwargs.get('tensorflow', None)
self.mpi = kwargs.get('mpi', None)
self.py_torch = kwargs.get('py_torch', None)
self.ray = kwargs.get('ray', None)
self.hdi = kwargs.get('hdi', None)
self.docker = kwargs.get('docker', None)
self.command_return_code_config = kwargs.get('command_return_code_config', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.application_endpoints = kwargs.get('application_endpoints', None)
self.parameters = kwargs.get('parameters', None)
self.autologger_settings = kwargs.get('autologger_settings', None)
self.data_bricks = kwargs.get('data_bricks', None)
self.training_diagnostic_config = kwargs.get('training_diagnostic_config', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
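# Construction sketch with hypothetical values; only the fields you set carry
# data, everything else stays None and is omitted from the serialized payload:
#
#   run_config = RunConfiguration(
#       script="train.py",                          # hypothetical script name
#       framework="Python",                         # one of the documented values
#       node_count=1,
#       environment_variables={"LOG_LEVEL": "DEBUG"},
#   )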
class RunDatasetReference(msrest.serialization.Model):
"""RunDatasetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(RunDatasetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.version = kwargs.get('version', None)
class RunDefinition(msrest.serialization.Model):
"""RunDefinition.
:ivar configuration:
:vartype configuration: ~flow.models.RunConfiguration
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar snapshots:
:vartype snapshots: list[~flow.models.Snapshot]
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar run_type:
:vartype run_type: str
:ivar display_name:
:vartype display_name: str
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar description:
:vartype description: str
:ivar cancel_reason:
:vartype cancel_reason: str
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': 'RunConfiguration'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'snapshots': {'key': 'snapshots', 'type': '[Snapshot]'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'cancel_reason': {'key': 'cancelReason', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword configuration:
:paramtype configuration: ~flow.models.RunConfiguration
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword snapshots:
:paramtype snapshots: list[~flow.models.Snapshot]
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword run_type:
:paramtype run_type: str
:keyword display_name:
:paramtype display_name: str
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword description:
:paramtype description: str
:keyword cancel_reason:
:paramtype cancel_reason: str
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(RunDefinition, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.snapshot_id = kwargs.get('snapshot_id', None)
self.snapshots = kwargs.get('snapshots', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.run_type = kwargs.get('run_type', None)
self.display_name = kwargs.get('display_name', None)
self.environment_asset_id = kwargs.get('environment_asset_id', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.description = kwargs.get('description', None)
self.cancel_reason = kwargs.get('cancel_reason', None)
self.properties = kwargs.get('properties', None)
self.tags = kwargs.get('tags', None)
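# A RunDefinition typically wraps a RunConfiguration (hypothetical values;
# illustration only, following on from the run_config sketch above):
#
#   definition = RunDefinition(
#       configuration=run_config,     # RunConfiguration from the sketch above
#       run_type="flow",              # hypothetical run type string
#       display_name="my-flow-run",
#       tags={"team": "search"},
#   )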
class RunDetailsDto(msrest.serialization.Model):
"""RunDetailsDto.
:ivar run_id:
:vartype run_id: str
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar root_run_uuid:
:vartype root_run_uuid: str
:ivar target:
:vartype target: str
:ivar status:
:vartype status: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar created_time_utc:
:vartype created_time_utc: ~datetime.datetime
:ivar start_time_utc:
:vartype start_time_utc: ~datetime.datetime
:ivar end_time_utc:
:vartype end_time_utc: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar warnings:
:vartype warnings: list[~flow.models.RunDetailsWarningDto]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar services: This is a dictionary.
:vartype services: dict[str, ~flow.models.EndpointSetting]
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar log_files: This is a dictionary.
:vartype log_files: dict[str, str]
:ivar job_cost:
:vartype job_cost: ~flow.models.JobCost
:ivar revision:
:vartype revision: long
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2
:ivar settings: This is a dictionary.
:vartype settings: dict[str, str]
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar created_by:
:vartype created_by: ~flow.models.User
:ivar compute_duration:
:vartype compute_duration: str
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar run_number:
:vartype run_number: int
:ivar root_run_id:
:vartype root_run_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar user_id:
:vartype user_id: str
:ivar status_revision:
:vartype status_revision: long
:ivar current_compute_time:
:vartype current_compute_time: str
:ivar last_start_time_utc:
:vartype last_start_time_utc: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.User
:ivar last_modified_utc:
:vartype last_modified_utc: ~datetime.datetime
:ivar duration:
:vartype duration: str
:ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype inputs: dict[str, ~flow.models.TypedAssetReference]
:ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype outputs: dict[str, ~flow.models.TypedAssetReference]
:ivar current_attempt_id:
:vartype current_attempt_id: int
"""
_validation = {
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'services': {'key': 'services', 'type': '{EndpointSetting}'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'log_files': {'key': 'logFiles', 'type': '{str}'},
'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
'revision': {'key': 'revision', 'type': 'long'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
'settings': {'key': 'settings', 'type': '{str}'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'created_by': {'key': 'createdBy', 'type': 'User'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'user_id': {'key': 'userId', 'type': 'str'},
'status_revision': {'key': 'statusRevision', 'type': 'long'},
'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword root_run_uuid:
:paramtype root_run_uuid: str
:keyword target:
:paramtype target: str
:keyword status:
:paramtype status: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword created_time_utc:
:paramtype created_time_utc: ~datetime.datetime
:keyword start_time_utc:
:paramtype start_time_utc: ~datetime.datetime
:keyword end_time_utc:
:paramtype end_time_utc: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword warnings:
:paramtype warnings: list[~flow.models.RunDetailsWarningDto]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword services: This is a dictionary.
:paramtype services: dict[str, ~flow.models.EndpointSetting]
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword log_files: This is a dictionary.
:paramtype log_files: dict[str, str]
:keyword job_cost:
:paramtype job_cost: ~flow.models.JobCost
:keyword revision:
:paramtype revision: long
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2
:keyword settings: This is a dictionary.
:paramtype settings: dict[str, str]
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword created_by:
:paramtype created_by: ~flow.models.User
:keyword compute_duration:
:paramtype compute_duration: str
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword run_number:
:paramtype run_number: int
:keyword root_run_id:
:paramtype root_run_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword user_id:
:paramtype user_id: str
:keyword status_revision:
:paramtype status_revision: long
:keyword current_compute_time:
:paramtype current_compute_time: str
:keyword last_start_time_utc:
:paramtype last_start_time_utc: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.User
:keyword last_modified_utc:
:paramtype last_modified_utc: ~datetime.datetime
:keyword duration:
:paramtype duration: str
:keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
:keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
:keyword current_attempt_id:
:paramtype current_attempt_id: int
"""
super(RunDetailsDto, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.run_uuid = kwargs.get('run_uuid', None)
self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
self.root_run_uuid = kwargs.get('root_run_uuid', None)
self.target = kwargs.get('target', None)
self.status = kwargs.get('status', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.created_time_utc = kwargs.get('created_time_utc', None)
self.start_time_utc = kwargs.get('start_time_utc', None)
self.end_time_utc = kwargs.get('end_time_utc', None)
self.error = kwargs.get('error', None)
self.warnings = kwargs.get('warnings', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.parameters = kwargs.get('parameters', None)
self.services = kwargs.get('services', None)
self.input_datasets = kwargs.get('input_datasets', None)
self.output_datasets = kwargs.get('output_datasets', None)
self.run_definition = kwargs.get('run_definition', None)
self.log_files = kwargs.get('log_files', None)
self.job_cost = kwargs.get('job_cost', None)
self.revision = kwargs.get('revision', None)
self.run_type_v2 = kwargs.get('run_type_v2', None)
self.settings = kwargs.get('settings', None)
self.compute_request = kwargs.get('compute_request', None)
self.compute = kwargs.get('compute', None)
self.created_by = kwargs.get('created_by', None)
self.compute_duration = kwargs.get('compute_duration', None)
self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
self.run_number = kwargs.get('run_number', None)
self.root_run_id = kwargs.get('root_run_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.user_id = kwargs.get('user_id', None)
self.status_revision = kwargs.get('status_revision', None)
self.current_compute_time = kwargs.get('current_compute_time', None)
self.last_start_time_utc = kwargs.get('last_start_time_utc', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.last_modified_utc = kwargs.get('last_modified_utc', None)
self.duration = kwargs.get('duration', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.current_attempt_id = kwargs.get('current_attempt_id', None)
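# Reading a deserialized RunDetailsDto (illustration only; `details` here is a
# hypothetical instance returned by a service call):
#
#   if details.error is not None:
#       print(details.error)                   # ErrorResponse for a failed run
#   for log_name, log_url in (details.log_files or {}).items():
#       print(log_name, "->", log_url)         # logFiles is a str -> str dictionary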
class RunDetailsWarningDto(msrest.serialization.Model):
"""RunDetailsWarningDto.
:ivar source:
:vartype source: str
:ivar message:
:vartype message: str
"""
_attribute_map = {
'source': {'key': 'source', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source:
:paramtype source: str
:keyword message:
:paramtype message: str
"""
super(RunDetailsWarningDto, self).__init__(**kwargs)
self.source = kwargs.get('source', None)
self.message = kwargs.get('message', None)
class RunDto(msrest.serialization.Model):
"""RunDto.
:ivar run_number:
:vartype run_number: int
:ivar root_run_id:
:vartype root_run_id: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar created_by:
:vartype created_by: ~flow.models.User
:ivar user_id:
:vartype user_id: str
:ivar token:
:vartype token: str
:ivar token_expiry_time_utc:
:vartype token_expiry_time_utc: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar warnings:
:vartype warnings: list[~flow.models.RunDetailsWarningDto]
:ivar revision:
:vartype revision: long
:ivar status_revision:
:vartype status_revision: long
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar root_run_uuid:
:vartype root_run_uuid: str
:ivar last_start_time_utc:
:vartype last_start_time_utc: ~datetime.datetime
:ivar current_compute_time:
:vartype current_compute_time: str
:ivar compute_duration:
:vartype compute_duration: str
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.User
:ivar last_modified_utc:
:vartype last_modified_utc: ~datetime.datetime
:ivar duration:
:vartype duration: str
:ivar cancelation_reason:
:vartype cancelation_reason: str
:ivar current_attempt_id:
:vartype current_attempt_id: int
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar status:
:vartype status: str
:ivar start_time_utc:
:vartype start_time_utc: ~datetime.datetime
:ivar end_time_utc:
:vartype end_time_utc: ~datetime.datetime
:ivar schedule_id:
:vartype schedule_id: str
:ivar display_name:
:vartype display_name: str
:ivar name:
:vartype name: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar description:
:vartype description: str
:ivar hidden:
:vartype hidden: bool
:ivar run_type:
:vartype run_type: str
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar action_uris: Dictionary of :code:`<string>`.
:vartype action_uris: dict[str, str]
:ivar script_name:
:vartype script_name: str
:ivar target:
:vartype target: str
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar settings: Dictionary of :code:`<string>`.
:vartype settings: dict[str, str]
:ivar services: Dictionary of :code:`<EndpointSetting>`.
:vartype services: dict[str, ~flow.models.EndpointSetting]
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar job_specification: Anything.
:vartype job_specification: any
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar created_from:
:vartype created_from: ~flow.models.CreatedFromDto
:ivar cancel_uri:
:vartype cancel_uri: str
:ivar complete_uri:
:vartype complete_uri: str
:ivar diagnostics_uri:
:vartype diagnostics_uri: str
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar retain_for_lifetime_of_workspace:
:vartype retain_for_lifetime_of_workspace: bool
:ivar queueing_info:
:vartype queueing_info: ~flow.models.QueueingInfo
:ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype inputs: dict[str, ~flow.models.TypedAssetReference]
:ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype outputs: dict[str, ~flow.models.TypedAssetReference]
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_number': {'key': 'runNumber', 'type': 'int'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'User'},
'user_id': {'key': 'userId', 'type': 'str'},
'token': {'key': 'token', 'type': 'str'},
'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
'revision': {'key': 'revision', 'type': 'long'},
'status_revision': {'key': 'statusRevision', 'type': 'long'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'str'},
'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'},
'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
'schedule_id': {'key': 'scheduleId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'hidden': {'key': 'hidden', 'type': 'bool'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
'properties': {'key': 'properties', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'action_uris': {'key': 'actionUris', 'type': '{str}'},
'script_name': {'key': 'scriptName', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'tags': {'key': 'tags', 'type': '{str}'},
'settings': {'key': 'settings', 'type': '{str}'},
'services': {'key': 'services', 'type': '{EndpointSetting}'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'job_specification': {'key': 'jobSpecification', 'type': 'object'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'created_from': {'key': 'createdFrom', 'type': 'CreatedFromDto'},
'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
'complete_uri': {'key': 'completeUri', 'type': 'str'},
'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_number:
:paramtype run_number: int
:keyword root_run_id:
:paramtype root_run_id: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword created_by:
:paramtype created_by: ~flow.models.User
:keyword user_id:
:paramtype user_id: str
:keyword token:
:paramtype token: str
:keyword token_expiry_time_utc:
:paramtype token_expiry_time_utc: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword warnings:
:paramtype warnings: list[~flow.models.RunDetailsWarningDto]
:keyword revision:
:paramtype revision: long
:keyword status_revision:
:paramtype status_revision: long
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword root_run_uuid:
:paramtype root_run_uuid: str
:keyword last_start_time_utc:
:paramtype last_start_time_utc: ~datetime.datetime
:keyword current_compute_time:
:paramtype current_compute_time: str
:keyword compute_duration:
:paramtype compute_duration: str
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.User
:keyword last_modified_utc:
:paramtype last_modified_utc: ~datetime.datetime
:keyword duration:
:paramtype duration: str
:keyword cancelation_reason:
:paramtype cancelation_reason: str
:keyword current_attempt_id:
:paramtype current_attempt_id: int
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword status:
:paramtype status: str
:keyword start_time_utc:
:paramtype start_time_utc: ~datetime.datetime
:keyword end_time_utc:
:paramtype end_time_utc: ~datetime.datetime
:keyword schedule_id:
:paramtype schedule_id: str
:keyword display_name:
:paramtype display_name: str
:keyword name:
:paramtype name: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword description:
:paramtype description: str
:keyword hidden:
:paramtype hidden: bool
:keyword run_type:
:paramtype run_type: str
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword action_uris: Dictionary of :code:`<string>`.
:paramtype action_uris: dict[str, str]
:keyword script_name:
:paramtype script_name: str
:keyword target:
:paramtype target: str
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword settings: Dictionary of :code:`<string>`.
:paramtype settings: dict[str, str]
:keyword services: Dictionary of :code:`<EndpointSetting>`.
:paramtype services: dict[str, ~flow.models.EndpointSetting]
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword job_specification: Anything.
:paramtype job_specification: any
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword created_from:
:paramtype created_from: ~flow.models.CreatedFromDto
:keyword cancel_uri:
:paramtype cancel_uri: str
:keyword complete_uri:
:paramtype complete_uri: str
:keyword diagnostics_uri:
:paramtype diagnostics_uri: str
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword retain_for_lifetime_of_workspace:
:paramtype retain_for_lifetime_of_workspace: bool
:keyword queueing_info:
:paramtype queueing_info: ~flow.models.QueueingInfo
:keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
:keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
"""
super(RunDto, self).__init__(**kwargs)
self.run_number = kwargs.get('run_number', None)
self.root_run_id = kwargs.get('root_run_id', None)
self.created_utc = kwargs.get('created_utc', None)
self.created_by = kwargs.get('created_by', None)
self.user_id = kwargs.get('user_id', None)
self.token = kwargs.get('token', None)
self.token_expiry_time_utc = kwargs.get('token_expiry_time_utc', None)
self.error = kwargs.get('error', None)
self.warnings = kwargs.get('warnings', None)
self.revision = kwargs.get('revision', None)
self.status_revision = kwargs.get('status_revision', None)
self.run_uuid = kwargs.get('run_uuid', None)
self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
self.root_run_uuid = kwargs.get('root_run_uuid', None)
self.last_start_time_utc = kwargs.get('last_start_time_utc', None)
self.current_compute_time = kwargs.get('current_compute_time', None)
self.compute_duration = kwargs.get('compute_duration', None)
self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.last_modified_utc = kwargs.get('last_modified_utc', None)
self.duration = kwargs.get('duration', None)
self.cancelation_reason = kwargs.get('cancelation_reason', None)
self.current_attempt_id = kwargs.get('current_attempt_id', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.status = kwargs.get('status', None)
self.start_time_utc = kwargs.get('start_time_utc', None)
self.end_time_utc = kwargs.get('end_time_utc', None)
self.schedule_id = kwargs.get('schedule_id', None)
self.display_name = kwargs.get('display_name', None)
self.name = kwargs.get('name', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.description = kwargs.get('description', None)
self.hidden = kwargs.get('hidden', None)
self.run_type = kwargs.get('run_type', None)
self.run_type_v2 = kwargs.get('run_type_v2', None)
self.properties = kwargs.get('properties', None)
self.parameters = kwargs.get('parameters', None)
self.action_uris = kwargs.get('action_uris', None)
self.script_name = kwargs.get('script_name', None)
self.target = kwargs.get('target', None)
self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
self.tags = kwargs.get('tags', None)
self.settings = kwargs.get('settings', None)
self.services = kwargs.get('services', None)
self.input_datasets = kwargs.get('input_datasets', None)
self.output_datasets = kwargs.get('output_datasets', None)
self.run_definition = kwargs.get('run_definition', None)
self.job_specification = kwargs.get('job_specification', None)
self.primary_metric_name = kwargs.get('primary_metric_name', None)
self.created_from = kwargs.get('created_from', None)
self.cancel_uri = kwargs.get('cancel_uri', None)
self.complete_uri = kwargs.get('complete_uri', None)
self.diagnostics_uri = kwargs.get('diagnostics_uri', None)
self.compute_request = kwargs.get('compute_request', None)
self.compute = kwargs.get('compute', None)
self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
self.queueing_info = kwargs.get('queueing_info', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
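
# Usage sketch (illustrative, not generated code): RunDto, like every model in
# this module, is built purely from keyword arguments; anything not passed
# stays None and is omitted from the wire payload. Field values below are
# hypothetical.
#
#     run = RunDto(
#         run_id='example-run-id',
#         status='Completed',
#         tags={'owner': 'example-user'},
#     )
#     run.serialize()   # camelCase dict per _attribute_map, e.g. {'runId': ...}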


class RunIndexEntity(msrest.serialization.Model):
"""RunIndexEntity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar schema_id:
:vartype schema_id: str
:ivar entity_id:
:vartype entity_id: str
:ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:vartype kind: str or ~flow.models.EntityKind
:ivar annotations:
:vartype annotations: ~flow.models.RunAnnotations
:ivar properties:
:vartype properties: ~flow.models.RunProperties
:ivar internal: Any object.
:vartype internal: any
:ivar update_sequence:
:vartype update_sequence: long
:ivar type:
:vartype type: str
:ivar version:
:vartype version: str
:ivar entity_container_id:
:vartype entity_container_id: str
:ivar entity_object_id:
:vartype entity_object_id: str
:ivar resource_type:
:vartype resource_type: str
:ivar relationships:
:vartype relationships: list[~flow.models.Relationship]
:ivar asset_id:
:vartype asset_id: str
"""
_validation = {
'version': {'readonly': True},
'entity_container_id': {'readonly': True},
'entity_object_id': {'readonly': True},
'resource_type': {'readonly': True},
}
_attribute_map = {
'schema_id': {'key': 'schemaId', 'type': 'str'},
'entity_id': {'key': 'entityId', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'annotations': {'key': 'annotations', 'type': 'RunAnnotations'},
'properties': {'key': 'properties', 'type': 'RunProperties'},
'internal': {'key': 'internal', 'type': 'object'},
'update_sequence': {'key': 'updateSequence', 'type': 'long'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'relationships': {'key': 'relationships', 'type': '[Relationship]'},
'asset_id': {'key': 'assetId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword schema_id:
:paramtype schema_id: str
:keyword entity_id:
:paramtype entity_id: str
:keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:paramtype kind: str or ~flow.models.EntityKind
:keyword annotations:
:paramtype annotations: ~flow.models.RunAnnotations
:keyword properties:
:paramtype properties: ~flow.models.RunProperties
:keyword internal: Any object.
:paramtype internal: any
:keyword update_sequence:
:paramtype update_sequence: long
:keyword type:
:paramtype type: str
:keyword relationships:
:paramtype relationships: list[~flow.models.Relationship]
:keyword asset_id:
:paramtype asset_id: str
"""
super(RunIndexEntity, self).__init__(**kwargs)
self.schema_id = kwargs.get('schema_id', None)
self.entity_id = kwargs.get('entity_id', None)
self.kind = kwargs.get('kind', None)
self.annotations = kwargs.get('annotations', None)
self.properties = kwargs.get('properties', None)
self.internal = kwargs.get('internal', None)
self.update_sequence = kwargs.get('update_sequence', None)
self.type = kwargs.get('type', None)
self.version = None
self.entity_container_id = None
self.entity_object_id = None
self.resource_type = None
self.relationships = kwargs.get('relationships', None)
self.asset_id = kwargs.get('asset_id', None)
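
# Usage sketch (illustrative): the four fields marked readonly in _validation
# are populated only by the service; __init__ pins them to None and
# serialize() never sends them, so a locally built entity carries just the
# writable fields.
#
#     entity = RunIndexEntity(entity_id='example-entity', kind='Versioned')
#     entity.version       # None until deserialized from a response
#     entity.serialize()   # readonly keys are excluded from the payload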


class RunIndexMetricSummary(msrest.serialization.Model):
"""RunIndexMetricSummary.
:ivar count:
:vartype count: long
:ivar last_value: Anything.
:vartype last_value: any
:ivar minimum_value: Anything.
:vartype minimum_value: any
:ivar maximum_value: Anything.
:vartype maximum_value: any
:ivar metric_type:
:vartype metric_type: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'long'},
'last_value': {'key': 'lastValue', 'type': 'object'},
'minimum_value': {'key': 'minimumValue', 'type': 'object'},
'maximum_value': {'key': 'maximumValue', 'type': 'object'},
'metric_type': {'key': 'metricType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword count:
:paramtype count: long
:keyword last_value: Anything.
:paramtype last_value: any
:keyword minimum_value: Anything.
:paramtype minimum_value: any
:keyword maximum_value: Anything.
:paramtype maximum_value: any
:keyword metric_type:
:paramtype metric_type: str
"""
super(RunIndexMetricSummary, self).__init__(**kwargs)
self.count = kwargs.get('count', None)
self.last_value = kwargs.get('last_value', None)
self.minimum_value = kwargs.get('minimum_value', None)
self.maximum_value = kwargs.get('maximum_value', None)
self.metric_type = kwargs.get('metric_type', None)


class RunIndexMetricSummarySystemObject(msrest.serialization.Model):
"""RunIndexMetricSummarySystemObject.
:ivar count:
:vartype count: long
:ivar last_value: Anything.
:vartype last_value: any
:ivar minimum_value: Anything.
:vartype minimum_value: any
:ivar maximum_value: Anything.
:vartype maximum_value: any
:ivar metric_type:
:vartype metric_type: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'long'},
'last_value': {'key': 'lastValue', 'type': 'object'},
'minimum_value': {'key': 'minimumValue', 'type': 'object'},
'maximum_value': {'key': 'maximumValue', 'type': 'object'},
'metric_type': {'key': 'metricType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword count:
:paramtype count: long
:keyword last_value: Anything.
:paramtype last_value: any
:keyword minimum_value: Anything.
:paramtype minimum_value: any
:keyword maximum_value: Anything.
:paramtype maximum_value: any
:keyword metric_type:
:paramtype metric_type: str
"""
super(RunIndexMetricSummarySystemObject, self).__init__(**kwargs)
self.count = kwargs.get('count', None)
self.last_value = kwargs.get('last_value', None)
self.minimum_value = kwargs.get('minimum_value', None)
self.maximum_value = kwargs.get('maximum_value', None)
self.metric_type = kwargs.get('metric_type', None)


class RunIndexResourceMetricSummary(msrest.serialization.Model):
"""RunIndexResourceMetricSummary.
:ivar gpu_utilization_percent_last_hour:
:vartype gpu_utilization_percent_last_hour: float
:ivar gpu_memory_utilization_percent_last_hour:
:vartype gpu_memory_utilization_percent_last_hour: float
:ivar gpu_energy_joules:
:vartype gpu_energy_joules: float
:ivar resource_metric_names:
:vartype resource_metric_names: list[str]
"""
_attribute_map = {
'gpu_utilization_percent_last_hour': {'key': 'gpuUtilizationPercentLastHour', 'type': 'float'},
'gpu_memory_utilization_percent_last_hour': {'key': 'gpuMemoryUtilizationPercentLastHour', 'type': 'float'},
'gpu_energy_joules': {'key': 'gpuEnergyJoules', 'type': 'float'},
'resource_metric_names': {'key': 'resourceMetricNames', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword gpu_utilization_percent_last_hour:
:paramtype gpu_utilization_percent_last_hour: float
:keyword gpu_memory_utilization_percent_last_hour:
:paramtype gpu_memory_utilization_percent_last_hour: float
:keyword gpu_energy_joules:
:paramtype gpu_energy_joules: float
:keyword resource_metric_names:
:paramtype resource_metric_names: list[str]
"""
super(RunIndexResourceMetricSummary, self).__init__(**kwargs)
self.gpu_utilization_percent_last_hour = kwargs.get('gpu_utilization_percent_last_hour', None)
self.gpu_memory_utilization_percent_last_hour = kwargs.get('gpu_memory_utilization_percent_last_hour', None)
self.gpu_energy_joules = kwargs.get('gpu_energy_joules', None)
self.resource_metric_names = kwargs.get('resource_metric_names', None)


class RunMetricDto(msrest.serialization.Model):
"""RunMetricDto.
:ivar run_id:
:vartype run_id: str
:ivar metric_id:
:vartype metric_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar metric_type:
:vartype metric_type: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar label:
:vartype label: str
:ivar num_cells:
:vartype num_cells: int
:ivar data_location:
:vartype data_location: str
:ivar cells:
:vartype cells: list[dict[str, any]]
:ivar schema:
:vartype schema: ~flow.models.MetricSchemaDto
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'metric_id': {'key': 'metricId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'metric_type': {'key': 'metricType', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'num_cells': {'key': 'numCells', 'type': 'int'},
'data_location': {'key': 'dataLocation', 'type': 'str'},
'cells': {'key': 'cells', 'type': '[{object}]'},
'schema': {'key': 'schema', 'type': 'MetricSchemaDto'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword metric_id:
:paramtype metric_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword metric_type:
:paramtype metric_type: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword label:
:paramtype label: str
:keyword num_cells:
:paramtype num_cells: int
:keyword data_location:
:paramtype data_location: str
:keyword cells:
:paramtype cells: list[dict[str, any]]
:keyword schema:
:paramtype schema: ~flow.models.MetricSchemaDto
"""
super(RunMetricDto, self).__init__(**kwargs)
self.run_id = kwargs.get('run_id', None)
self.metric_id = kwargs.get('metric_id', None)
self.data_container_id = kwargs.get('data_container_id', None)
self.metric_type = kwargs.get('metric_type', None)
self.created_utc = kwargs.get('created_utc', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.label = kwargs.get('label', None)
self.num_cells = kwargs.get('num_cells', None)
self.data_location = kwargs.get('data_location', None)
self.cells = kwargs.get('cells', None)
self.schema = kwargs.get('schema', None)
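
# Usage sketch (illustrative): rebuilding a RunMetricDto from a hypothetical
# service payload. 'createdUtc' is parsed to a datetime by the iso-8601 rule
# in _attribute_map, and 'cells' stays a list of free-form dicts ('[{object}]').
#
#     raw = {
#         'runId': 'example-run-id',
#         'metricType': 'azureml.v1.scalar',
#         'createdUtc': '2023-01-01T00:00:00Z',
#         'cells': [{'accuracy': 0.93}],
#     }
#     metric = RunMetricDto.deserialize(raw)
#     metric.created_utc   # datetime.datetime(2023, 1, 1, ...)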


class RunMetricsTypesDto(msrest.serialization.Model):
"""RunMetricsTypesDto.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
"""
super(RunMetricsTypesDto, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)


class RunProperties(msrest.serialization.Model):
"""RunProperties.
:ivar data_container_id:
:vartype data_container_id: str
:ivar target_name:
:vartype target_name: str
:ivar run_name:
:vartype run_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar root_run_id:
:vartype root_run_id: str
:ivar run_type:
:vartype run_type: str
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2Index
:ivar script_name:
:vartype script_name: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar run_number:
:vartype run_number: int
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar user_properties: This is a dictionary.
:vartype user_properties: dict[str, str]
:ivar action_uris: This is a dictionary.
:vartype action_uris: dict[str, str]
:ivar duration:
:vartype duration: str
:ivar duration_milliseconds:
:vartype duration_milliseconds: float
:ivar creation_context:
:vartype creation_context: ~flow.models.CreationContext
"""
_attribute_map = {
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'target_name': {'key': 'targetName', 'type': 'str'},
'run_name': {'key': 'runName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2Index'},
'script_name': {'key': 'scriptName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'user_properties': {'key': 'userProperties', 'type': '{str}'},
'action_uris': {'key': 'actionUris', 'type': '{str}'},
'duration': {'key': 'duration', 'type': 'str'},
'duration_milliseconds': {'key': 'durationMilliseconds', 'type': 'float'},
'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_container_id:
:paramtype data_container_id: str
:keyword target_name:
:paramtype target_name: str
:keyword run_name:
:paramtype run_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword root_run_id:
:paramtype root_run_id: str
:keyword run_type:
:paramtype run_type: str
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2Index
:keyword script_name:
:paramtype script_name: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword run_number:
:paramtype run_number: int
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword user_properties: This is a dictionary.
:paramtype user_properties: dict[str, str]
:keyword action_uris: This is a dictionary.
:paramtype action_uris: dict[str, str]
:keyword duration:
:paramtype duration: str
:keyword duration_milliseconds:
:paramtype duration_milliseconds: float
:keyword creation_context:
:paramtype creation_context: ~flow.models.CreationContext
"""
super(RunProperties, self).__init__(**kwargs)
self.data_container_id = kwargs.get('data_container_id', None)
self.target_name = kwargs.get('target_name', None)
self.run_name = kwargs.get('run_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.root_run_id = kwargs.get('root_run_id', None)
self.run_type = kwargs.get('run_type', None)
self.run_type_v2 = kwargs.get('run_type_v2', None)
self.script_name = kwargs.get('script_name', None)
self.experiment_id = kwargs.get('experiment_id', None)
self.run_uuid = kwargs.get('run_uuid', None)
self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
self.run_number = kwargs.get('run_number', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.compute_request = kwargs.get('compute_request', None)
self.compute = kwargs.get('compute', None)
self.user_properties = kwargs.get('user_properties', None)
self.action_uris = kwargs.get('action_uris', None)
self.duration = kwargs.get('duration', None)
self.duration_milliseconds = kwargs.get('duration_milliseconds', None)
self.creation_context = kwargs.get('creation_context', None)


class RunSettingParameter(msrest.serialization.Model):
"""RunSettingParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool", "String",
"JsonString", "YamlString", "StringList".
:vartype parameter_type: str or ~flow.models.RunSettingParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar description:
:vartype description: str
:ivar run_setting_ui_hint:
:vartype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
:ivar argument_name:
:vartype argument_name: str
:ivar section_name:
:vartype section_name: str
:ivar section_description:
:vartype section_description: str
:ivar section_argument_name:
:vartype section_argument_name: str
:ivar examples:
:vartype examples: list[str]
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar disabled_by_parameters:
:vartype disabled_by_parameters: list[str]
:ivar module_run_setting_type: Possible values include: "All", "Released", "Default",
"Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
:vartype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
:ivar linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
:vartype linked_parameter_default_value_mapping: dict[str, str]
:ivar linked_parameter_key_name:
:vartype linked_parameter_key_name: str
:ivar support_link_setting:
:vartype support_link_setting: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'run_setting_ui_hint': {'key': 'runSettingUIHint', 'type': 'RunSettingUIParameterHint'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'section_name': {'key': 'sectionName', 'type': 'str'},
'section_description': {'key': 'sectionDescription', 'type': 'str'},
'section_argument_name': {'key': 'sectionArgumentName', 'type': 'str'},
'examples': {'key': 'examples', 'type': '[str]'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'disabled_by_parameters': {'key': 'disabledByParameters', 'type': '[str]'},
'module_run_setting_type': {'key': 'moduleRunSettingType', 'type': 'str'},
'linked_parameter_default_value_mapping': {'key': 'linkedParameterDefaultValueMapping', 'type': '{str}'},
'linked_parameter_key_name': {'key': 'linkedParameterKeyName', 'type': 'str'},
'support_link_setting': {'key': 'supportLinkSetting', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool",
"String", "JsonString", "YamlString", "StringList".
:paramtype parameter_type: str or ~flow.models.RunSettingParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword description:
:paramtype description: str
:keyword run_setting_ui_hint:
:paramtype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
:keyword argument_name:
:paramtype argument_name: str
:keyword section_name:
:paramtype section_name: str
:keyword section_description:
:paramtype section_description: str
:keyword section_argument_name:
:paramtype section_argument_name: str
:keyword examples:
:paramtype examples: list[str]
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword disabled_by_parameters:
:paramtype disabled_by_parameters: list[str]
:keyword module_run_setting_type: Possible values include: "All", "Released", "Default",
"Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
:paramtype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
:keyword linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
:paramtype linked_parameter_default_value_mapping: dict[str, str]
:keyword linked_parameter_key_name:
:paramtype linked_parameter_key_name: str
:keyword support_link_setting:
:paramtype support_link_setting: bool
"""
super(RunSettingParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.parameter_type = kwargs.get('parameter_type', None)
self.is_optional = kwargs.get('is_optional', None)
self.default_value = kwargs.get('default_value', None)
self.lower_bound = kwargs.get('lower_bound', None)
self.upper_bound = kwargs.get('upper_bound', None)
self.description = kwargs.get('description', None)
self.run_setting_ui_hint = kwargs.get('run_setting_ui_hint', None)
self.argument_name = kwargs.get('argument_name', None)
self.section_name = kwargs.get('section_name', None)
self.section_description = kwargs.get('section_description', None)
self.section_argument_name = kwargs.get('section_argument_name', None)
self.examples = kwargs.get('examples', None)
self.enum_values = kwargs.get('enum_values', None)
self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
self.disabled_by_parameters = kwargs.get('disabled_by_parameters', None)
self.module_run_setting_type = kwargs.get('module_run_setting_type', None)
self.linked_parameter_default_value_mapping = kwargs.get('linked_parameter_default_value_mapping', None)
self.linked_parameter_key_name = kwargs.get('linked_parameter_key_name', None)
self.support_link_setting = kwargs.get('support_link_setting', None)
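
# Usage sketch (illustrative): enum-typed fields such as parameter_type are
# declared as 'str' in _attribute_map, so they accept either the plain wire
# string or the matching ~flow.models enum member. Values are hypothetical.
#
#     param = RunSettingParameter(
#         name='node_count',
#         parameter_type='Int',   # one of the documented possible values
#         is_optional=True,
#         default_value='1',
#     )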


class RunSettingParameterAssignment(msrest.serialization.Model):
"""RunSettingParameterAssignment.
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
:ivar compute_run_settings:
:vartype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar linked_parameter_name:
:vartype linked_parameter_name: str
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.ParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.LegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
'compute_run_settings': {'key': 'computeRunSettings', 'type': '[RunSettingParameterAssignment]'},
'linked_parameter_name': {'key': 'linkedParameterName', 'type': 'str'},
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
:keyword compute_run_settings:
:paramtype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword linked_parameter_name:
:paramtype linked_parameter_name: str
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.ParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.LegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(RunSettingParameterAssignment, self).__init__(**kwargs)
self.use_graph_default_compute = kwargs.get('use_graph_default_compute', None)
self.mlc_compute_type = kwargs.get('mlc_compute_type', None)
self.compute_run_settings = kwargs.get('compute_run_settings', None)
self.linked_parameter_name = kwargs.get('linked_parameter_name', None)
self.value_type = kwargs.get('value_type', None)
self.assignments_to_concatenate = kwargs.get('assignments_to_concatenate', None)
self.data_path_assignment = kwargs.get('data_path_assignment', None)
self.data_set_definition_value_assignment = kwargs.get('data_set_definition_value_assignment', None)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
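
# Usage sketch (illustrative): the model nests itself through
# compute_run_settings, letting per-compute overrides ride along with a
# top-level assignment. Names and values below are hypothetical.
#
#     assignment = RunSettingParameterAssignment(
#         name='Target',
#         value='cpu-cluster',
#         value_type='Literal',
#         compute_run_settings=[
#             RunSettingParameterAssignment(
#                 name='NodeCount', value='2', value_type='Literal'),
#         ],
#     )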


class RunSettingUIParameterHint(msrest.serialization.Model):
"""RunSettingUIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
"Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
"DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
"JsonTextBox", "Connection", "Static".
:vartype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.UIJsonEditor
:ivar yaml_editor:
:vartype yaml_editor: ~flow.models.UIYamlEditor
:ivar compute_selection:
:vartype compute_selection: ~flow.models.UIComputeSelection
:ivar hyperparameter_configuration:
:vartype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
:ivar support_reset:
:vartype support_reset: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
'yaml_editor': {'key': 'yamlEditor', 'type': 'UIYamlEditor'},
'compute_selection': {'key': 'computeSelection', 'type': 'UIComputeSelection'},
'hyperparameter_configuration': {'key': 'hyperparameterConfiguration', 'type': 'UIHyperparameterConfiguration'},
'ux_ignore': {'key': 'uxIgnore', 'type': 'bool'},
'anonymous': {'key': 'anonymous', 'type': 'bool'},
'support_reset': {'key': 'supportReset', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
"Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
"DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
"JsonTextBox", "Connection", "Static".
:paramtype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.UIJsonEditor
:keyword yaml_editor:
:paramtype yaml_editor: ~flow.models.UIYamlEditor
:keyword compute_selection:
:paramtype compute_selection: ~flow.models.UIComputeSelection
:keyword hyperparameter_configuration:
:paramtype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
:keyword support_reset:
:paramtype support_reset: bool
"""
super(RunSettingUIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
self.json_editor = kwargs.get('json_editor', None)
self.yaml_editor = kwargs.get('yaml_editor', None)
self.compute_selection = kwargs.get('compute_selection', None)
self.hyperparameter_configuration = kwargs.get('hyperparameter_configuration', None)
self.ux_ignore = kwargs.get('ux_ignore', None)
self.anonymous = kwargs.get('anonymous', None)
self.support_reset = kwargs.get('support_reset', None)


class RunStatusPeriod(msrest.serialization.Model):
"""RunStatusPeriod.
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar sub_periods:
:vartype sub_periods: list[~flow.models.SubStatusPeriod]
:ivar start:
:vartype start: long
:ivar end:
:vartype end: long
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
'start': {'key': 'start', 'type': 'long'},
'end': {'key': 'end', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword sub_periods:
:paramtype sub_periods: list[~flow.models.SubStatusPeriod]
:keyword start:
:paramtype start: long
:keyword end:
:paramtype end: long
"""
super(RunStatusPeriod, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.sub_periods = kwargs.get('sub_periods', None)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)


class RuntimeConfiguration(msrest.serialization.Model):
"""RuntimeConfiguration.
:ivar base_image:
:vartype base_image: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'base_image': {'key': 'baseImage', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword base_image:
:paramtype base_image: str
:keyword version:
:paramtype version: str
"""
super(RuntimeConfiguration, self).__init__(**kwargs)
self.base_image = kwargs.get('base_image', None)
self.version = kwargs.get('version', None)
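
# Usage sketch (illustrative): RuntimeConfiguration round-trips between
# snake_case attributes and the camelCase wire form. The image and version
# strings are hypothetical.
#
#     config = RuntimeConfiguration(
#         base_image='example.azurecr.io/runtime-base', version='20230101.v1')
#     payload = config.serialize()   # {'baseImage': ..., 'version': ...}
#     RuntimeConfiguration.deserialize(payload).base_image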


class RunTypeV2(msrest.serialization.Model):
"""RunTypeV2.
:ivar orchestrator:
:vartype orchestrator: str
:ivar traits:
:vartype traits: list[str]
:ivar attribution:
:vartype attribution: str
:ivar compute_type:
:vartype compute_type: str
"""
_validation = {
'traits': {'unique': True},
}
_attribute_map = {
'orchestrator': {'key': 'orchestrator', 'type': 'str'},
'traits': {'key': 'traits', 'type': '[str]'},
'attribution': {'key': 'attribution', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword orchestrator:
:paramtype orchestrator: str
:keyword traits:
:paramtype traits: list[str]
:keyword attribution:
:paramtype attribution: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(RunTypeV2, self).__init__(**kwargs)
self.orchestrator = kwargs.get('orchestrator', None)
self.traits = kwargs.get('traits', None)
self.attribution = kwargs.get('attribution', None)
self.compute_type = kwargs.get('compute_type', None)
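
# Usage sketch (illustrative): traits carries a {'unique': True} constraint,
# so msrest's recursive validation reports duplicated entries client-side
# before any request is built.
#
#     run_type = RunTypeV2(orchestrator='example', traits=['remote', 'remote'])
#     errors = run_type.validate()   # non-empty list: traits must be unique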


class RunTypeV2Index(msrest.serialization.Model):
"""RunTypeV2Index.
:ivar orchestrator:
:vartype orchestrator: str
:ivar traits: Dictionary of :code:`<string>`.
:vartype traits: dict[str, str]
:ivar attribution:
:vartype attribution: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'orchestrator': {'key': 'orchestrator', 'type': 'str'},
'traits': {'key': 'traits', 'type': '{str}'},
'attribution': {'key': 'attribution', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword orchestrator:
:paramtype orchestrator: str
:keyword traits: Dictionary of :code:`<string>`.
:paramtype traits: dict[str, str]
:keyword attribution:
:paramtype attribution: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(RunTypeV2Index, self).__init__(**kwargs)
self.orchestrator = kwargs.get('orchestrator', None)
self.traits = kwargs.get('traits', None)
self.attribution = kwargs.get('attribution', None)
self.compute_type = kwargs.get('compute_type', None)


class SampleMeta(msrest.serialization.Model):
"""SampleMeta.
:ivar image:
:vartype image: str
:ivar id:
:vartype id: str
:ivar display_name:
:vartype display_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar doc_link:
:vartype doc_link: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar updated_at:
:vartype updated_at: ~datetime.datetime
:ivar feed_name:
:vartype feed_name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'doc_link': {'key': 'docLink', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'updated_at': {'key': 'updatedAt', 'type': 'iso-8601'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword id:
:paramtype id: str
:keyword display_name:
:paramtype display_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword doc_link:
:paramtype doc_link: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword updated_at:
:paramtype updated_at: ~datetime.datetime
:keyword feed_name:
:paramtype feed_name: str
:keyword version:
:paramtype version: str
"""
super(SampleMeta, self).__init__(**kwargs)
self.image = kwargs.get('image', None)
self.id = kwargs.get('id', None)
self.display_name = kwargs.get('display_name', None)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.doc_link = kwargs.get('doc_link', None)
self.tags = kwargs.get('tags', None)
self.created_at = kwargs.get('created_at', None)
self.updated_at = kwargs.get('updated_at', None)
self.feed_name = kwargs.get('feed_name', None)
self.version = kwargs.get('version', None)


class SavedDataSetReference(msrest.serialization.Model):
"""SavedDataSetReference.
:ivar id:
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
"""
super(SavedDataSetReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)


class SavePipelineDraftRequest(msrest.serialization.Model):
"""SavePipelineDraftRequest.
:ivar ui_widget_meta_infos:
:vartype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
:ivar web_service_inputs:
:vartype web_service_inputs: list[~flow.models.WebServicePort]
:ivar web_service_outputs:
:vartype web_service_outputs: list[~flow.models.WebServicePort]
:ivar nodes_in_draft:
:vartype nodes_in_draft: list[str]
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'ui_widget_meta_infos': {'key': 'uiWidgetMetaInfos', 'type': '[UIWidgetMetaInfo]'},
'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_meta_infos:
:paramtype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
:keyword web_service_inputs:
:paramtype web_service_inputs: list[~flow.models.WebServicePort]
:keyword web_service_outputs:
:paramtype web_service_outputs: list[~flow.models.WebServicePort]
:keyword nodes_in_draft:
:paramtype nodes_in_draft: list[str]
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(SavePipelineDraftRequest, self).__init__(**kwargs)
self.ui_widget_meta_infos = kwargs.get('ui_widget_meta_infos', None)
self.web_service_inputs = kwargs.get('web_service_inputs', None)
self.web_service_outputs = kwargs.get('web_service_outputs', None)
self.nodes_in_draft = kwargs.get('nodes_in_draft', None)
self.name = kwargs.get('name', None)
self.pipeline_type = kwargs.get('pipeline_type', None)
self.pipeline_draft_mode = kwargs.get('pipeline_draft_mode', None)
self.graph_components_mode = kwargs.get('graph_components_mode', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)


class ScheduleBase(msrest.serialization.Model):
"""ScheduleBase.
:ivar schedule_status: Possible values include: "Enabled", "Disabled".
:vartype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
:ivar schedule_type: Possible values include: "Cron", "Recurrence".
:vartype schedule_type: str or ~flow.models.ScheduleType
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar time_zone:
:vartype time_zone: str
:ivar expression:
:vartype expression: str
:ivar frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:vartype frequency: str or ~flow.models.RecurrenceFrequency
:ivar interval:
:vartype interval: int
:ivar pattern:
:vartype pattern: ~flow.models.RecurrencePattern
"""
_attribute_map = {
'schedule_status': {'key': 'scheduleStatus', 'type': 'str'},
'schedule_type': {'key': 'scheduleType', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
'expression': {'key': 'expression', 'type': 'str'},
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'pattern': {'key': 'pattern', 'type': 'RecurrencePattern'},
}
def __init__(
self,
**kwargs
):
"""
:keyword schedule_status: Possible values include: "Enabled", "Disabled".
:paramtype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
:keyword schedule_type: Possible values include: "Cron", "Recurrence".
:paramtype schedule_type: str or ~flow.models.ScheduleType
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword time_zone:
:paramtype time_zone: str
:keyword expression:
:paramtype expression: str
:keyword frequency: Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:paramtype frequency: str or ~flow.models.RecurrenceFrequency
:keyword interval:
:paramtype interval: int
:keyword pattern:
:paramtype pattern: ~flow.models.RecurrencePattern
"""
super(ScheduleBase, self).__init__(**kwargs)
self.schedule_status = kwargs.get('schedule_status', None)
self.schedule_type = kwargs.get('schedule_type', None)
self.end_time = kwargs.get('end_time', None)
self.start_time = kwargs.get('start_time', None)
self.time_zone = kwargs.get('time_zone', None)
self.expression = kwargs.get('expression', None)
self.frequency = kwargs.get('frequency', None)
self.interval = kwargs.get('interval', None)
self.pattern = kwargs.get('pattern', None)
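
# Usage sketch (illustrative): ScheduleBase flattens both schedule flavors
# into one shape -- a Cron schedule fills expression, while a Recurrence
# schedule fills frequency/interval/pattern. The values are hypothetical.
#
#     cron = ScheduleBase(schedule_type='Cron', schedule_status='Enabled',
#                         expression='0 9 * * 1', time_zone='UTC')
#     weekly = ScheduleBase(schedule_type='Recurrence', frequency='Week',
#                           interval=1)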


class SchemaContractsCreatedBy(msrest.serialization.Model):
"""SchemaContractsCreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
:ivar user_principal_name:
:vartype user_principal_name: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'user_principal_name': {'key': 'userPrincipalName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
:keyword user_principal_name:
:paramtype user_principal_name: str
"""
super(SchemaContractsCreatedBy, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
self.user_principal_name = kwargs.get('user_principal_name', None)


class ScopeCloudConfiguration(msrest.serialization.Model):
"""ScopeCloudConfiguration.
:ivar input_path_suffixes: This is a dictionary.
:vartype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:ivar output_path_suffixes: This is a dictionary.
:vartype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:ivar user_alias:
:vartype user_alias: str
:ivar tokens:
:vartype tokens: int
:ivar auto_token:
:vartype auto_token: int
:ivar vcp:
:vartype vcp: float
"""
_attribute_map = {
'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{ArgumentAssignment}'},
'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{ArgumentAssignment}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'tokens': {'key': 'tokens', 'type': 'int'},
'auto_token': {'key': 'autoToken', 'type': 'int'},
'vcp': {'key': 'vcp', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword input_path_suffixes: This is a dictionary.
:paramtype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:keyword output_path_suffixes: This is a dictionary.
:paramtype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:keyword user_alias:
:paramtype user_alias: str
:keyword tokens:
:paramtype tokens: int
:keyword auto_token:
:paramtype auto_token: int
:keyword vcp:
:paramtype vcp: float
"""
super(ScopeCloudConfiguration, self).__init__(**kwargs)
self.input_path_suffixes = kwargs.get('input_path_suffixes', None)
self.output_path_suffixes = kwargs.get('output_path_suffixes', None)
self.user_alias = kwargs.get('user_alias', None)
self.tokens = kwargs.get('tokens', None)
self.auto_token = kwargs.get('auto_token', None)
self.vcp = kwargs.get('vcp', None)


class Seasonality(msrest.serialization.Model):
"""Seasonality.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.SeasonalityMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.SeasonalityMode
:keyword value:
:paramtype value: int
"""
super(Seasonality, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)


class SecretConfiguration(msrest.serialization.Model):
"""SecretConfiguration.
:ivar workspace_secret_name:
:vartype workspace_secret_name: str
:ivar uri:
:vartype uri: str
"""
_attribute_map = {
'workspace_secret_name': {'key': 'workspace_secret_name', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword workspace_secret_name:
:paramtype workspace_secret_name: str
:keyword uri:
:paramtype uri: str
"""
super(SecretConfiguration, self).__init__(**kwargs)
self.workspace_secret_name = kwargs.get('workspace_secret_name', None)
self.uri = kwargs.get('uri', None)


class SegmentedResult1(msrest.serialization.Model):
"""SegmentedResult1.
:ivar value:
:vartype value: list[~flow.models.FlowIndexEntity]
:ivar continuation_token:
:vartype continuation_token: str
:ivar count:
:vartype count: int
:ivar next_link:
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[FlowIndexEntity]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword value:
:paramtype value: list[~flow.models.FlowIndexEntity]
:keyword continuation_token:
:paramtype continuation_token: str
:keyword count:
:paramtype count: int
:keyword next_link:
:paramtype next_link: str
"""
super(SegmentedResult1, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.continuation_token = kwargs.get('continuation_token', None)
self.count = kwargs.get('count', None)
self.next_link = kwargs.get('next_link', None)
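
# Usage sketch (illustrative): SegmentedResult1 is a paged envelope; callers
# re-issue the request with the returned token until it comes back empty.
# `fetch_page` is a hypothetical callable returning a SegmentedResult1.
#
#     token, entities = None, []
#     while True:
#         page = fetch_page(continuation_token=token)
#         entities.extend(page.value or [])
#         token = page.continuation_token
#         if not token:
#             break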


class ServiceLogRequest(msrest.serialization.Model):
"""ServiceLogRequest.
:ivar log_level: Possible values include: "Trace", "Debug", "Information", "Warning", "Error",
"Critical", "None".
:vartype log_level: str or ~flow.models.LogLevel
:ivar message:
:vartype message: str
:ivar timestamp:
:vartype timestamp: ~datetime.datetime
"""
_attribute_map = {
'log_level': {'key': 'logLevel', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword log_level: Possible values include: "Trace", "Debug", "Information", "Warning",
"Error", "Critical", "None".
:paramtype log_level: str or ~flow.models.LogLevel
:keyword message:
:paramtype message: str
:keyword timestamp:
:paramtype timestamp: ~datetime.datetime
"""
super(ServiceLogRequest, self).__init__(**kwargs)
self.log_level = kwargs.get('log_level', None)
self.message = kwargs.get('message', None)
self.timestamp = kwargs.get('timestamp', None)
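
# Usage sketch (illustrative): a client-side log record; the timestamp is
# emitted as an ISO-8601 string on the wire. The message text is hypothetical.
#
#     import datetime
#     entry = ServiceLogRequest(
#         log_level='Warning',
#         message='runtime restarted',
#         timestamp=datetime.datetime.now(datetime.timezone.utc),
#     )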


class SessionApplication(msrest.serialization.Model):
"""SessionApplication.
:ivar image:
:vartype image: str
:ivar env_vars: Dictionary of :code:`<string>`.
:vartype env_vars: dict[str, str]
:ivar python_pip_requirements:
:vartype python_pip_requirements: list[str]
:ivar setup_results:
:vartype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'env_vars': {'key': 'envVars', 'type': '{str}'},
'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
'setup_results': {'key': 'setupResults', 'type': '[SessionApplicationRunCommandResult]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword env_vars: Dictionary of :code:`<string>`.
:paramtype env_vars: dict[str, str]
:keyword python_pip_requirements:
:paramtype python_pip_requirements: list[str]
:keyword setup_results:
:paramtype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
"""
super(SessionApplication, self).__init__(**kwargs)
self.image = kwargs.get('image', None)
self.env_vars = kwargs.get('env_vars', None)
self.python_pip_requirements = kwargs.get('python_pip_requirements', None)
self.setup_results = kwargs.get('setup_results', None)


class SessionApplicationRunCommandResult(msrest.serialization.Model):
"""SessionApplicationRunCommandResult.
:ivar command:
:vartype command: str
:ivar arguments:
:vartype arguments: list[str]
:ivar exit_code:
:vartype exit_code: int
:ivar std_out:
:vartype std_out: str
:ivar std_err:
:vartype std_err: str
"""
_attribute_map = {
'command': {'key': 'command', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'exit_code': {'key': 'exitCode', 'type': 'int'},
'std_out': {'key': 'stdOut', 'type': 'str'},
'std_err': {'key': 'stdErr', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command:
:paramtype command: str
:keyword arguments:
:paramtype arguments: list[str]
:keyword exit_code:
:paramtype exit_code: int
:keyword std_out:
:paramtype std_out: str
:keyword std_err:
:paramtype std_err: str
"""
super(SessionApplicationRunCommandResult, self).__init__(**kwargs)
self.command = kwargs.get('command', None)
self.arguments = kwargs.get('arguments', None)
self.exit_code = kwargs.get('exit_code', None)
self.std_out = kwargs.get('std_out', None)
self.std_err = kwargs.get('std_err', None)
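
# --- Illustrative usage (not part of the generated client) ------------------
# Sketch of nesting these models: SessionApplication collects per-command
# SessionApplicationRunCommandResult objects in ``setup_results``. All literal
# values are hypothetical.
#
#     pip_install = SessionApplicationRunCommandResult(
#         command="pip",
#         arguments=["install", "-r", "requirements.txt"],
#         exit_code=0,
#         std_out="install log text",
#         std_err="",
#     )
#     app = SessionApplication(
#         image="example.azurecr.io/runtime:latest",
#         env_vars={"EXAMPLE_VAR": "value"},
#         python_pip_requirements=["numpy"],
#         setup_results=[pip_install],
#     )
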
class SessionProperties(msrest.serialization.Model):
"""SessionProperties.
:ivar session_id:
:vartype session_id: str
:ivar subscription_id:
:vartype subscription_id: str
:ivar resource_group_name:
:vartype resource_group_name: str
:ivar workspace_name:
:vartype workspace_name: str
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar application:
:vartype application: ~flow.models.SessionApplication
:ivar last_alive_time:
:vartype last_alive_time: ~datetime.datetime
"""
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
'workspace_name': {'key': 'workspaceName', 'type': 'str'},
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'application': {'key': 'application', 'type': 'SessionApplication'},
'last_alive_time': {'key': 'lastAliveTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword session_id:
:paramtype session_id: str
:keyword subscription_id:
:paramtype subscription_id: str
:keyword resource_group_name:
:paramtype resource_group_name: str
:keyword workspace_name:
:paramtype workspace_name: str
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword application:
:paramtype application: ~flow.models.SessionApplication
:keyword last_alive_time:
:paramtype last_alive_time: ~datetime.datetime
"""
super(SessionProperties, self).__init__(**kwargs)
self.session_id = kwargs.get('session_id', None)
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group_name = kwargs.get('resource_group_name', None)
self.workspace_name = kwargs.get('workspace_name', None)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.application = kwargs.get('application', None)
self.last_alive_time = kwargs.get('last_alive_time', None)
class SetupFlowSessionRequest(msrest.serialization.Model):
"""SetupFlowSessionRequest.
:ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
:vartype action: str or ~flow.models.SetupFlowSessionAction
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'action': {'key': 'action', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
:paramtype action: str or ~flow.models.SetupFlowSessionAction
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(SetupFlowSessionRequest, self).__init__(**kwargs)
self.action = kwargs.get('action', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
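
# --- Illustrative usage (not part of the generated client) ------------------
# Sketch of a SetupFlowSessionRequest. ``action`` takes one of the string
# values documented above ("Install", "Reset", "Update", "Delete"); the VM
# size and idle timeout shown are hypothetical.
#
#     setup_request = SetupFlowSessionRequest(
#         action="Install",
#         vm_size="STANDARD_DS11_V2",
#         max_idle_time_seconds=3600,
#     )
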
class SharingScope(msrest.serialization.Model):
"""SharingScope.
:ivar type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
"Workspace".
:vartype type: str or ~flow.models.ScopeType
:ivar identifier:
:vartype identifier: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'identifier': {'key': 'identifier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
"Workspace".
:paramtype type: str or ~flow.models.ScopeType
:keyword identifier:
:paramtype identifier: str
"""
super(SharingScope, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.identifier = kwargs.get('identifier', None)
class Snapshot(msrest.serialization.Model):
"""Snapshot.
:ivar id:
:vartype id: str
:ivar directory_name:
:vartype directory_name: str
:ivar snapshot_asset_id:
:vartype snapshot_asset_id: str
:ivar snapshot_entity_id:
:vartype snapshot_entity_id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'directory_name': {'key': 'directoryName', 'type': 'str'},
'snapshot_asset_id': {'key': 'snapshotAssetId', 'type': 'str'},
'snapshot_entity_id': {'key': 'snapshotEntityId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword directory_name:
:paramtype directory_name: str
:keyword snapshot_asset_id:
:paramtype snapshot_asset_id: str
:keyword snapshot_entity_id:
:paramtype snapshot_entity_id: str
"""
super(Snapshot, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.directory_name = kwargs.get('directory_name', None)
self.snapshot_asset_id = kwargs.get('snapshot_asset_id', None)
self.snapshot_entity_id = kwargs.get('snapshot_entity_id', None)
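
# --- Illustrative note (not part of the generated client) -------------------
# These msrest models serialize through ``_attribute_map``: ``serialize()``
# (and ``as_dict()``) emit the wire-format keys declared there, not the Python
# attribute names, and skip attributes left as None. The ids below are
# hypothetical.
#
#     snapshot = Snapshot(id="abc123", directory_name="src")
#     snapshot.serialize()
#     # -> {'id': 'abc123', 'directoryName': 'src'}
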
class SnapshotInfo(msrest.serialization.Model):
"""SnapshotInfo.
:ivar root_download_url:
:vartype root_download_url: str
:ivar snapshots: This is a dictionary.
:vartype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
"""
_attribute_map = {
'root_download_url': {'key': 'rootDownloadUrl', 'type': 'str'},
'snapshots': {'key': 'snapshots', 'type': '{DownloadResourceInfo}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword root_download_url:
:paramtype root_download_url: str
:keyword snapshots: This is a dictionary.
:paramtype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
"""
super(SnapshotInfo, self).__init__(**kwargs)
self.root_download_url = kwargs.get('root_download_url', None)
self.snapshots = kwargs.get('snapshots', None)
class SourceCodeDataReference(msrest.serialization.Model):
"""SourceCodeDataReference.
:ivar data_store_name:
:vartype data_store_name: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword path:
:paramtype path: str
"""
super(SourceCodeDataReference, self).__init__(**kwargs)
self.data_store_name = kwargs.get('data_store_name', None)
self.path = kwargs.get('path', None)
class SparkConfiguration(msrest.serialization.Model):
"""SparkConfiguration.
:ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str]
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar spark_pool_resource_id:
:vartype spark_pool_resource_id: str
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': '{str}'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'spark_pool_resource_id': {'key': 'sparkPoolResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword configuration: Dictionary of :code:`<string>`.
:paramtype configuration: dict[str, str]
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword spark_pool_resource_id:
:paramtype spark_pool_resource_id: str
"""
super(SparkConfiguration, self).__init__(**kwargs)
self.configuration = kwargs.get('configuration', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.jars = kwargs.get('jars', None)
self.py_files = kwargs.get('py_files', None)
self.spark_pool_resource_id = kwargs.get('spark_pool_resource_id', None)
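
# --- Illustrative usage (not part of the generated client) ------------------
# Sketch of a SparkConfiguration combining the dictionary-typed
# ``configuration`` field with the list-typed file fields; every value,
# including the placeholder Synapse pool resource id, is hypothetical.
#
#     spark_config = SparkConfiguration(
#         configuration={"spark.executor.cores": "2"},
#         py_files=["helpers.zip"],
#         jars=["lib/example.jar"],
#         spark_pool_resource_id=(
#             "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#             "Microsoft.Synapse/workspaces/<ws>/bigDataPools/<pool>"
#         ),
#     )
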
class SparkJarTaskDto(msrest.serialization.Model):
"""SparkJarTaskDto.
:ivar main_class_name:
:vartype main_class_name: str
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'main_class_name': {'key': 'main_class_name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword main_class_name:
:paramtype main_class_name: str
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkJarTaskDto, self).__init__(**kwargs)
self.main_class_name = kwargs.get('main_class_name', None)
self.parameters = kwargs.get('parameters', None)
class SparkJob(msrest.serialization.Model):
"""SparkJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar resources:
:vartype resources: ~flow.models.SparkResourceConfiguration
:ivar args:
:vartype args: str
:ivar code_id:
:vartype code_id: str
:ivar entry:
:vartype entry: ~flow.models.SparkJobEntry
:ivar py_files:
:vartype py_files: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar environment_id:
:vartype environment_id: str
:ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'},
'args': {'key': 'args', 'type': 'str'},
'code_id': {'key': 'codeId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'SparkJobEntry'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
'conf': {'key': 'conf', 'type': '{str}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword resources:
:paramtype resources: ~flow.models.SparkResourceConfiguration
:keyword args:
:paramtype args: str
:keyword code_id:
:paramtype code_id: str
:keyword entry:
:paramtype entry: ~flow.models.SparkJobEntry
:keyword py_files:
:paramtype py_files: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword environment_id:
:paramtype environment_id: str
:keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(SparkJob, self).__init__(**kwargs)
self.job_type = kwargs.get('job_type', None)
self.resources = kwargs.get('resources', None)
self.args = kwargs.get('args', None)
self.code_id = kwargs.get('code_id', None)
self.entry = kwargs.get('entry', None)
self.py_files = kwargs.get('py_files', None)
self.jars = kwargs.get('jars', None)
self.files = kwargs.get('files', None)
self.archives = kwargs.get('archives', None)
self.environment_id = kwargs.get('environment_id', None)
self.input_data_bindings = kwargs.get('input_data_bindings', None)
self.output_data_bindings = kwargs.get('output_data_bindings', None)
self.conf = kwargs.get('conf', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.parent_job_name = kwargs.get('parent_job_name', None)
self.display_name = kwargs.get('display_name', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.status = kwargs.get('status', None)
self.interaction_endpoints = kwargs.get('interaction_endpoints', None)
self.identity = kwargs.get('identity', None)
self.compute = kwargs.get('compute', None)
self.priority = kwargs.get('priority', None)
self.output = kwargs.get('output', None)
self.is_archived = kwargs.get('is_archived', None)
self.schedule = kwargs.get('schedule', None)
self.component_id = kwargs.get('component_id', None)
self.notification_setting = kwargs.get('notification_setting', None)
self.secrets_configuration = kwargs.get('secrets_configuration', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
class SparkJobEntry(msrest.serialization.Model):
"""SparkJobEntry.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(SparkJobEntry, self).__init__(**kwargs)
self.file = kwargs.get('file', None)
self.class_name = kwargs.get('class_name', None)
class SparkMavenPackage(msrest.serialization.Model):
"""SparkMavenPackage.
:ivar group:
:vartype group: str
:ivar artifact:
:vartype artifact: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'group': {'key': 'group', 'type': 'str'},
'artifact': {'key': 'artifact', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword group:
:paramtype group: str
:keyword artifact:
:paramtype artifact: str
:keyword version:
:paramtype version: str
"""
super(SparkMavenPackage, self).__init__(**kwargs)
self.group = kwargs.get('group', None)
self.artifact = kwargs.get('artifact', None)
self.version = kwargs.get('version', None)
class SparkPythonTaskDto(msrest.serialization.Model):
"""SparkPythonTaskDto.
:ivar python_file:
:vartype python_file: str
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'python_file': {'key': 'python_file', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword python_file:
:paramtype python_file: str
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkPythonTaskDto, self).__init__(**kwargs)
self.python_file = kwargs.get('python_file', None)
self.parameters = kwargs.get('parameters', None)
class SparkResourceConfiguration(msrest.serialization.Model):
"""SparkResourceConfiguration.
:ivar instance_type:
:vartype instance_type: str
:ivar runtime_version:
:vartype runtime_version: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
:keyword runtime_version:
:paramtype runtime_version: str
"""
super(SparkResourceConfiguration, self).__init__(**kwargs)
self.instance_type = kwargs.get('instance_type', None)
self.runtime_version = kwargs.get('runtime_version', None)
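
# --- Illustrative usage (not part of the generated client) ------------------
# Sketch of a minimal SparkJob assembled from the smaller models above; only a
# handful of the optional fields are set, and every value is hypothetical.
# "Spark" is one of the documented ``job_type`` values.
#
#     spark_job = SparkJob(
#         job_type="Spark",
#         entry=SparkJobEntry(file="main.py"),
#         code_id="azureml:example-code:1",
#         environment_id="azureml:example-env:1",
#         conf={"spark.driver.memory": "2g"},
#         resources=SparkResourceConfiguration(
#             instance_type="Standard_E4S_V3",
#             runtime_version="3.2",
#         ),
#     )
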
class SparkSection(msrest.serialization.Model):
"""SparkSection.
:ivar repositories:
:vartype repositories: list[str]
:ivar packages:
:vartype packages: list[~flow.models.SparkMavenPackage]
:ivar precache_packages:
:vartype precache_packages: bool
"""
_attribute_map = {
'repositories': {'key': 'repositories', 'type': '[str]'},
'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword repositories:
:paramtype repositories: list[str]
:keyword packages:
:paramtype packages: list[~flow.models.SparkMavenPackage]
:keyword precache_packages:
:paramtype precache_packages: bool
"""
super(SparkSection, self).__init__(**kwargs)
self.repositories = kwargs.get('repositories', None)
self.packages = kwargs.get('packages', None)
self.precache_packages = kwargs.get('precache_packages', None)
class SparkSubmitTaskDto(msrest.serialization.Model):
"""SparkSubmitTaskDto.
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkSubmitTaskDto, self).__init__(**kwargs)
self.parameters = kwargs.get('parameters', None)
class SqlDataPath(msrest.serialization.Model):
"""SqlDataPath.
:ivar sql_table_name:
:vartype sql_table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar sql_stored_procedure_name:
:vartype sql_stored_procedure_name: str
:ivar sql_stored_procedure_params:
:vartype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
"""
_attribute_map = {
'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sql_table_name:
:paramtype sql_table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword sql_stored_procedure_name:
:paramtype sql_stored_procedure_name: str
:keyword sql_stored_procedure_params:
:paramtype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
"""
super(SqlDataPath, self).__init__(**kwargs)
self.sql_table_name = kwargs.get('sql_table_name', None)
self.sql_query = kwargs.get('sql_query', None)
self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None)
self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None)
class StackEnsembleSettings(msrest.serialization.Model):
"""StackEnsembleSettings.
:ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:vartype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
:ivar stack_meta_learner_train_percentage:
:vartype stack_meta_learner_train_percentage: float
    :ivar stack_meta_learner_k_wargs: Anything. Typically a dictionary of optional parameters to
     pass to the initializer of the stack meta-learner.
    :vartype stack_meta_learner_k_wargs: any
"""
_attribute_map = {
'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
}
def __init__(
self,
**kwargs
):
"""
:keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:paramtype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
:keyword stack_meta_learner_train_percentage:
:paramtype stack_meta_learner_train_percentage: float
        :keyword stack_meta_learner_k_wargs: Anything. Typically a dictionary of optional
         parameters to pass to the initializer of the stack meta-learner.
        :paramtype stack_meta_learner_k_wargs: any
"""
super(StackEnsembleSettings, self).__init__(**kwargs)
self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None)
self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', None)
self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None)
class StandbyPoolProperties(msrest.serialization.Model):
"""StandbyPoolProperties.
:ivar name:
:vartype name: str
:ivar count:
:vartype count: int
:ivar vm_size:
:vartype vm_size: str
:ivar standby_available_instances:
:vartype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'standby_available_instances': {'key': 'standbyAvailableInstances', 'type': '[StandbyPoolResourceStatus]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword count:
:paramtype count: int
:keyword vm_size:
:paramtype vm_size: str
:keyword standby_available_instances:
:paramtype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
"""
super(StandbyPoolProperties, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.count = kwargs.get('count', None)
self.vm_size = kwargs.get('vm_size', None)
self.standby_available_instances = kwargs.get('standby_available_instances', None)
class StandbyPoolResourceStatus(msrest.serialization.Model):
"""StandbyPoolResourceStatus.
:ivar status:
:vartype status: str
:ivar error:
:vartype error: ~flow.models.CloudError
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'CloudError'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status:
:paramtype status: str
:keyword error:
:paramtype error: ~flow.models.CloudError
"""
super(StandbyPoolResourceStatus, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.error = kwargs.get('error', None)
class StartRunResult(msrest.serialization.Model):
"""StartRunResult.
All required parameters must be populated in order to send to Azure.
:ivar run_id: Required.
:vartype run_id: str
"""
_validation = {
'run_id': {'required': True, 'min_length': 1},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword run_id: Required.
:paramtype run_id: str
"""
super(StartRunResult, self).__init__(**kwargs)
self.run_id = kwargs['run_id']
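
# --- Illustrative note (not part of the generated client) -------------------
# ``run_id`` is the only field of StartRunResult and is required: the
# initializer reads ``kwargs['run_id']`` directly, so omitting it raises a
# KeyError, and msrest's client-side validation also checks the ``min_length``
# rule from ``_validation`` when the model is serialized.
#
#     result = StartRunResult(run_id="example_run_id")   # hypothetical id
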
class StepRunProfile(msrest.serialization.Model):
"""StepRunProfile.
:ivar step_run_id:
:vartype step_run_id: str
:ivar step_run_number:
:vartype step_run_number: int
:ivar run_url:
:vartype run_url: str
:ivar compute_target:
:vartype compute_target: str
:ivar compute_target_url:
:vartype compute_target_url: str
:ivar node_id:
:vartype node_id: str
:ivar node_name:
:vartype node_name: str
:ivar step_name:
:vartype step_name: str
:ivar create_time:
:vartype create_time: long
:ivar start_time:
:vartype start_time: long
:ivar end_time:
:vartype end_time: long
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar is_reused:
:vartype is_reused: bool
:ivar reused_pipeline_run_id:
:vartype reused_pipeline_run_id: str
:ivar reused_step_run_id:
:vartype reused_step_run_id: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar status_timeline:
:vartype status_timeline: list[~flow.models.RunStatusPeriod]
"""
_attribute_map = {
'step_run_id': {'key': 'stepRunId', 'type': 'str'},
'step_run_number': {'key': 'stepRunNumber', 'type': 'int'},
'run_url': {'key': 'runUrl', 'type': 'str'},
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'compute_target_url': {'key': 'computeTargetUrl', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'step_name': {'key': 'stepName', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'long'},
'start_time': {'key': 'startTime', 'type': 'long'},
'end_time': {'key': 'endTime', 'type': 'long'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
'reused_step_run_id': {'key': 'reusedStepRunId', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'status_timeline': {'key': 'statusTimeline', 'type': '[RunStatusPeriod]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword step_run_id:
:paramtype step_run_id: str
:keyword step_run_number:
:paramtype step_run_number: int
:keyword run_url:
:paramtype run_url: str
:keyword compute_target:
:paramtype compute_target: str
:keyword compute_target_url:
:paramtype compute_target_url: str
:keyword node_id:
:paramtype node_id: str
:keyword node_name:
:paramtype node_name: str
:keyword step_name:
:paramtype step_name: str
:keyword create_time:
:paramtype create_time: long
:keyword start_time:
:paramtype start_time: long
:keyword end_time:
:paramtype end_time: long
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword is_reused:
:paramtype is_reused: bool
:keyword reused_pipeline_run_id:
:paramtype reused_pipeline_run_id: str
:keyword reused_step_run_id:
:paramtype reused_step_run_id: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword status_timeline:
:paramtype status_timeline: list[~flow.models.RunStatusPeriod]
"""
super(StepRunProfile, self).__init__(**kwargs)
self.step_run_id = kwargs.get('step_run_id', None)
self.step_run_number = kwargs.get('step_run_number', None)
self.run_url = kwargs.get('run_url', None)
self.compute_target = kwargs.get('compute_target', None)
self.compute_target_url = kwargs.get('compute_target_url', None)
self.node_id = kwargs.get('node_id', None)
self.node_name = kwargs.get('node_name', None)
self.step_name = kwargs.get('step_name', None)
self.create_time = kwargs.get('create_time', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.status = kwargs.get('status', None)
self.status_detail = kwargs.get('status_detail', None)
self.is_reused = kwargs.get('is_reused', None)
self.reused_pipeline_run_id = kwargs.get('reused_pipeline_run_id', None)
self.reused_step_run_id = kwargs.get('reused_step_run_id', None)
self.tags = kwargs.get('tags', None)
self.status_timeline = kwargs.get('status_timeline', None)
class StorageInfo(msrest.serialization.Model):
"""StorageInfo.
:ivar storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
:vartype storage_auth_type: str or ~flow.models.StorageAuthType
:ivar connection_string:
:vartype connection_string: str
:ivar sas_token:
:vartype sas_token: str
:ivar account_name:
:vartype account_name: str
"""
_attribute_map = {
'storage_auth_type': {'key': 'storageAuthType', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'account_name': {'key': 'accountName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
:paramtype storage_auth_type: str or ~flow.models.StorageAuthType
:keyword connection_string:
:paramtype connection_string: str
:keyword sas_token:
:paramtype sas_token: str
:keyword account_name:
:paramtype account_name: str
"""
super(StorageInfo, self).__init__(**kwargs)
self.storage_auth_type = kwargs.get('storage_auth_type', None)
self.connection_string = kwargs.get('connection_string', None)
self.sas_token = kwargs.get('sas_token', None)
self.account_name = kwargs.get('account_name', None)
class StoredProcedureParameter(msrest.serialization.Model):
"""StoredProcedureParameter.
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
:ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:vartype type: str or ~flow.models.StoredProcedureParameterType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
:keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:paramtype type: str or ~flow.models.StoredProcedureParameterType
"""
super(StoredProcedureParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
self.type = kwargs.get('type', None)
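
# --- Illustrative usage (not part of the generated client) ------------------
# Sketch of a stored-procedure flavored SqlDataPath using the
# StoredProcedureParameter model defined just above; ``type`` takes the string
# values documented there. Procedure and parameter names are hypothetical.
#
#     data_path = SqlDataPath(
#         sql_stored_procedure_name="dbo.GetExamples",
#         sql_stored_procedure_params=[
#             StoredProcedureParameter(name="limit", value="10", type="Int"),
#         ],
#     )
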
class Stream(msrest.serialization.Model):
"""Stream.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar can_read:
:vartype can_read: bool
:ivar can_write:
:vartype can_write: bool
:ivar can_seek:
:vartype can_seek: bool
:ivar can_timeout:
:vartype can_timeout: bool
:ivar length:
:vartype length: long
:ivar position:
:vartype position: long
:ivar read_timeout:
:vartype read_timeout: int
:ivar write_timeout:
:vartype write_timeout: int
"""
_validation = {
'can_read': {'readonly': True},
'can_write': {'readonly': True},
'can_seek': {'readonly': True},
'can_timeout': {'readonly': True},
'length': {'readonly': True},
}
_attribute_map = {
'can_read': {'key': 'canRead', 'type': 'bool'},
'can_write': {'key': 'canWrite', 'type': 'bool'},
'can_seek': {'key': 'canSeek', 'type': 'bool'},
'can_timeout': {'key': 'canTimeout', 'type': 'bool'},
'length': {'key': 'length', 'type': 'long'},
'position': {'key': 'position', 'type': 'long'},
'read_timeout': {'key': 'readTimeout', 'type': 'int'},
'write_timeout': {'key': 'writeTimeout', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword position:
:paramtype position: long
:keyword read_timeout:
:paramtype read_timeout: int
:keyword write_timeout:
:paramtype write_timeout: int
"""
super(Stream, self).__init__(**kwargs)
self.can_read = None
self.can_write = None
self.can_seek = None
self.can_timeout = None
self.length = None
self.position = kwargs.get('position', None)
self.read_timeout = kwargs.get('read_timeout', None)
self.write_timeout = kwargs.get('write_timeout', None)
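
# --- Illustrative note (not part of the generated client) -------------------
# Stream marks several fields ``'readonly': True`` in ``_validation``: they
# are initialized to None here and, as the docstring says, ignored when
# sending a request. Only ``position`` and the timeout fields are settable by
# the caller (values below are hypothetical).
#
#     stream = Stream(position=0, read_timeout=30)
#     stream.can_read   # None until populated from a service response
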
class StructuredInterface(msrest.serialization.Model):
"""StructuredInterface.
:ivar command_line_pattern:
:vartype command_line_pattern: str
:ivar inputs:
:vartype inputs: list[~flow.models.StructuredInterfaceInput]
:ivar outputs:
:vartype outputs: list[~flow.models.StructuredInterfaceOutput]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.ControlOutput]
:ivar parameters:
:vartype parameters: list[~flow.models.StructuredInterfaceParameter]
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
:ivar arguments:
:vartype arguments: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '[StructuredInterfaceInput]'},
'outputs': {'key': 'outputs', 'type': '[StructuredInterfaceOutput]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
'parameters': {'key': 'parameters', 'type': '[StructuredInterfaceParameter]'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[StructuredInterfaceParameter]'},
'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword command_line_pattern:
:paramtype command_line_pattern: str
:keyword inputs:
:paramtype inputs: list[~flow.models.StructuredInterfaceInput]
:keyword outputs:
:paramtype outputs: list[~flow.models.StructuredInterfaceOutput]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.ControlOutput]
:keyword parameters:
:paramtype parameters: list[~flow.models.StructuredInterfaceParameter]
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
:keyword arguments:
:paramtype arguments: list[~flow.models.ArgumentAssignment]
"""
super(StructuredInterface, self).__init__(**kwargs)
self.command_line_pattern = kwargs.get('command_line_pattern', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.control_outputs = kwargs.get('control_outputs', None)
self.parameters = kwargs.get('parameters', None)
self.metadata_parameters = kwargs.get('metadata_parameters', None)
self.arguments = kwargs.get('arguments', None)
class StructuredInterfaceInput(msrest.serialization.Model):
"""StructuredInterfaceInput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_ids_list:
:vartype data_type_ids_list: list[str]
:ivar is_optional:
:vartype is_optional: bool
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_resource:
:vartype is_resource: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar dataset_types:
:vartype dataset_types: list[str or ~flow.models.DatasetType]
"""
_validation = {
'dataset_types': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_resource': {'key': 'isResource', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_ids_list:
:paramtype data_type_ids_list: list[str]
:keyword is_optional:
:paramtype is_optional: bool
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_resource:
:paramtype is_resource: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword dataset_types:
:paramtype dataset_types: list[str or ~flow.models.DatasetType]
"""
super(StructuredInterfaceInput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_ids_list = kwargs.get('data_type_ids_list', None)
self.is_optional = kwargs.get('is_optional', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_resource = kwargs.get('is_resource', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.dataset_types = kwargs.get('dataset_types', None)
class StructuredInterfaceOutput(msrest.serialization.Model):
"""StructuredInterfaceOutput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_data_type_input_name:
:vartype pass_through_data_type_input_name: str
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_artifact:
:vartype is_artifact: bool
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar training_output:
:vartype training_output: ~flow.models.TrainingOutput
:ivar dataset_output:
:vartype dataset_output: ~flow.models.DatasetOutput
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AssetOutputSettings
:ivar early_available:
:vartype early_available: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_artifact': {'key': 'IsArtifact', 'type': 'bool'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'training_output': {'key': 'trainingOutput', 'type': 'TrainingOutput'},
'dataset_output': {'key': 'datasetOutput', 'type': 'DatasetOutput'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_data_type_input_name:
:paramtype pass_through_data_type_input_name: str
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_artifact:
:paramtype is_artifact: bool
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword training_output:
:paramtype training_output: ~flow.models.TrainingOutput
:keyword dataset_output:
:paramtype dataset_output: ~flow.models.DatasetOutput
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AssetOutputSettings
:keyword early_available:
:paramtype early_available: bool
"""
super(StructuredInterfaceOutput, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.data_type_id = kwargs.get('data_type_id', None)
self.pass_through_data_type_input_name = kwargs.get('pass_through_data_type_input_name', None)
self.description = kwargs.get('description', None)
self.skip_processing = kwargs.get('skip_processing', None)
self.is_artifact = kwargs.get('is_artifact', None)
self.data_store_name = kwargs.get('data_store_name', None)
self.data_store_mode = kwargs.get('data_store_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
self.overwrite = kwargs.get('overwrite', None)
self.data_reference_name = kwargs.get('data_reference_name', None)
self.training_output = kwargs.get('training_output', None)
self.dataset_output = kwargs.get('dataset_output', None)
self.asset_output_settings = kwargs.get('asset_output_settings', None)
self.early_available = kwargs.get('early_available', None)
class StructuredInterfaceParameter(msrest.serialization.Model):
"""StructuredInterfaceParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype parameter_type: str or ~flow.models.ParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar description:
:vartype description: str
:ivar set_environment_variable:
:vartype set_environment_variable: bool
:ivar environment_variable_override:
:vartype environment_variable_override: str
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar ui_hint:
:vartype ui_hint: ~flow.models.UIParameterHint
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
"Undefined".
:paramtype parameter_type: str or ~flow.models.ParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword description:
:paramtype description: str
:keyword set_environment_variable:
:paramtype set_environment_variable: bool
:keyword environment_variable_override:
:paramtype environment_variable_override: str
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.UIParameterHint
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
"""
super(StructuredInterfaceParameter, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.label = kwargs.get('label', None)
self.parameter_type = kwargs.get('parameter_type', None)
self.is_optional = kwargs.get('is_optional', None)
self.default_value = kwargs.get('default_value', None)
self.lower_bound = kwargs.get('lower_bound', None)
self.upper_bound = kwargs.get('upper_bound', None)
self.enum_values = kwargs.get('enum_values', None)
self.enum_values_to_argument_strings = kwargs.get('enum_values_to_argument_strings', None)
self.description = kwargs.get('description', None)
self.set_environment_variable = kwargs.get('set_environment_variable', None)
self.environment_variable_override = kwargs.get('environment_variable_override', None)
self.enabled_by_parameter_name = kwargs.get('enabled_by_parameter_name', None)
self.enabled_by_parameter_values = kwargs.get('enabled_by_parameter_values', None)
self.ui_hint = kwargs.get('ui_hint', None)
self.group_names = kwargs.get('group_names', None)
self.argument_name = kwargs.get('argument_name', None)
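
# --- Illustrative usage (not part of the generated client) ------------------
# Sketch of a StructuredInterfaceParameter describing an enum-style parameter.
# Note that ``default_value``, ``lower_bound`` and ``upper_bound`` are strings
# even for numeric parameter types; all values are hypothetical.
#
#     param = StructuredInterfaceParameter(
#         name="mode",
#         parameter_type="String",
#         is_optional=True,
#         default_value="fast",
#         enum_values=["fast", "accurate"],
#         enum_values_to_argument_strings={"fast": "--fast", "accurate": "--accurate"},
#     )
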
class StudioMigrationInfo(msrest.serialization.Model):
"""StudioMigrationInfo.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar source_workspace_id:
:vartype source_workspace_id: str
:ivar source_experiment_id:
:vartype source_experiment_id: str
:ivar source_experiment_link:
:vartype source_experiment_link: str
:ivar failed_node_id_list:
:vartype failed_node_id_list: list[str]
:ivar error_message:
:vartype error_message: str
"""
_validation = {
'error_message': {'readonly': True},
}
_attribute_map = {
'source_workspace_id': {'key': 'sourceWorkspaceId', 'type': 'str'},
'source_experiment_id': {'key': 'sourceExperimentId', 'type': 'str'},
'source_experiment_link': {'key': 'sourceExperimentLink', 'type': 'str'},
'failed_node_id_list': {'key': 'failedNodeIdList', 'type': '[str]'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword source_workspace_id:
:paramtype source_workspace_id: str
:keyword source_experiment_id:
:paramtype source_experiment_id: str
:keyword source_experiment_link:
:paramtype source_experiment_link: str
:keyword failed_node_id_list:
:paramtype failed_node_id_list: list[str]
"""
super(StudioMigrationInfo, self).__init__(**kwargs)
self.source_workspace_id = kwargs.get('source_workspace_id', None)
self.source_experiment_id = kwargs.get('source_experiment_id', None)
self.source_experiment_link = kwargs.get('source_experiment_link', None)
self.failed_node_id_list = kwargs.get('failed_node_id_list', None)
self.error_message = None
class SubGraphConcatenateAssignment(msrest.serialization.Model):
"""SubGraphConcatenateAssignment.
:ivar concatenate_parameter:
:vartype concatenate_parameter: list[~flow.models.ParameterAssignment]
:ivar parameter_assignments:
:vartype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
"""
_attribute_map = {
'concatenate_parameter': {'key': 'concatenateParameter', 'type': '[ParameterAssignment]'},
'parameter_assignments': {'key': 'parameterAssignments', 'type': 'SubPipelineParameterAssignment'},
}
def __init__(
self,
**kwargs
):
"""
:keyword concatenate_parameter:
:paramtype concatenate_parameter: list[~flow.models.ParameterAssignment]
:keyword parameter_assignments:
:paramtype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
"""
super(SubGraphConcatenateAssignment, self).__init__(**kwargs)
self.concatenate_parameter = kwargs.get('concatenate_parameter', None)
self.parameter_assignments = kwargs.get('parameter_assignments', None)
class SubGraphConfiguration(msrest.serialization.Model):
"""SubGraphConfiguration.
:ivar graph_id:
:vartype graph_id: str
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar is_dynamic:
:vartype is_dynamic: bool
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'CloudPrioritySetting'},
'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword is_dynamic:
:paramtype is_dynamic: bool
"""
super(SubGraphConfiguration, self).__init__(**kwargs)
self.graph_id = kwargs.get('graph_id', None)
self.graph_draft_id = kwargs.get('graph_draft_id', None)
self.default_cloud_priority = kwargs.get('default_cloud_priority', None)
self.is_dynamic = kwargs.get('is_dynamic', False)
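
# --- Illustrative note (not part of the generated client) -------------------
# Unlike most fields in these models, ``is_dynamic`` defaults to False rather
# than None when omitted.
#
#     sub_graph = SubGraphConfiguration(graph_id="example-graph-id")
#     sub_graph.is_dynamic   # -> False
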
class SubGraphConnectionInfo(msrest.serialization.Model):
"""SubGraphConnectionInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
"""
super(SubGraphConnectionInfo, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
class SubGraphDataPathParameterAssignment(msrest.serialization.Model):
"""SubGraphDataPathParameterAssignment.
:ivar data_set_path_parameter:
:vartype data_set_path_parameter: ~flow.models.DataSetPathParameter
:ivar data_set_path_parameter_assignments:
:vartype data_set_path_parameter_assignments: list[str]
"""
_attribute_map = {
'data_set_path_parameter': {'key': 'dataSetPathParameter', 'type': 'DataSetPathParameter'},
'data_set_path_parameter_assignments': {'key': 'dataSetPathParameterAssignments', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword data_set_path_parameter:
:paramtype data_set_path_parameter: ~flow.models.DataSetPathParameter
:keyword data_set_path_parameter_assignments:
:paramtype data_set_path_parameter_assignments: list[str]
"""
super(SubGraphDataPathParameterAssignment, self).__init__(**kwargs)
self.data_set_path_parameter = kwargs.get('data_set_path_parameter', None)
self.data_set_path_parameter_assignments = kwargs.get('data_set_path_parameter_assignments', None)
class SubGraphInfo(msrest.serialization.Model):
"""SubGraphInfo.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar default_compute_target:
:vartype default_compute_target: ~flow.models.ComputeSetting
:ivar default_data_store:
:vartype default_data_store: ~flow.models.DatastoreSetting
:ivar id:
:vartype id: str
:ivar parent_graph_id:
:vartype parent_graph_id: str
:ivar pipeline_definition_id:
:vartype pipeline_definition_id: str
:ivar sub_graph_parameter_assignment:
:vartype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
:ivar sub_graph_concatenate_assignment:
:vartype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
:ivar sub_graph_data_path_parameter_assignment:
:vartype sub_graph_data_path_parameter_assignment:
list[~flow.models.SubGraphDataPathParameterAssignment]
:ivar sub_graph_default_compute_target_nodes:
:vartype sub_graph_default_compute_target_nodes: list[str]
:ivar sub_graph_default_data_store_nodes:
:vartype sub_graph_default_data_store_nodes: list[str]
:ivar inputs:
:vartype inputs: list[~flow.models.SubGraphPortInfo]
:ivar outputs:
:vartype outputs: list[~flow.models.SubGraphPortInfo]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'parent_graph_id': {'key': 'parentGraphId', 'type': 'str'},
'pipeline_definition_id': {'key': 'pipelineDefinitionId', 'type': 'str'},
'sub_graph_parameter_assignment': {'key': 'subGraphParameterAssignment', 'type': '[SubGraphParameterAssignment]'},
'sub_graph_concatenate_assignment': {'key': 'subGraphConcatenateAssignment', 'type': '[SubGraphConcatenateAssignment]'},
'sub_graph_data_path_parameter_assignment': {'key': 'subGraphDataPathParameterAssignment', 'type': '[SubGraphDataPathParameterAssignment]'},
'sub_graph_default_compute_target_nodes': {'key': 'subGraphDefaultComputeTargetNodes', 'type': '[str]'},
'sub_graph_default_data_store_nodes': {'key': 'subGraphDefaultDataStoreNodes', 'type': '[str]'},
'inputs': {'key': 'inputs', 'type': '[SubGraphPortInfo]'},
'outputs': {'key': 'outputs', 'type': '[SubGraphPortInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword default_compute_target:
:paramtype default_compute_target: ~flow.models.ComputeSetting
:keyword default_data_store:
:paramtype default_data_store: ~flow.models.DatastoreSetting
:keyword id:
:paramtype id: str
:keyword parent_graph_id:
:paramtype parent_graph_id: str
:keyword pipeline_definition_id:
:paramtype pipeline_definition_id: str
:keyword sub_graph_parameter_assignment:
:paramtype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
:keyword sub_graph_concatenate_assignment:
:paramtype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
:keyword sub_graph_data_path_parameter_assignment:
:paramtype sub_graph_data_path_parameter_assignment:
list[~flow.models.SubGraphDataPathParameterAssignment]
:keyword sub_graph_default_compute_target_nodes:
:paramtype sub_graph_default_compute_target_nodes: list[str]
:keyword sub_graph_default_data_store_nodes:
:paramtype sub_graph_default_data_store_nodes: list[str]
:keyword inputs:
:paramtype inputs: list[~flow.models.SubGraphPortInfo]
:keyword outputs:
:paramtype outputs: list[~flow.models.SubGraphPortInfo]
"""
super(SubGraphInfo, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.default_compute_target = kwargs.get('default_compute_target', None)
self.default_data_store = kwargs.get('default_data_store', None)
self.id = kwargs.get('id', None)
self.parent_graph_id = kwargs.get('parent_graph_id', None)
self.pipeline_definition_id = kwargs.get('pipeline_definition_id', None)
self.sub_graph_parameter_assignment = kwargs.get('sub_graph_parameter_assignment', None)
self.sub_graph_concatenate_assignment = kwargs.get('sub_graph_concatenate_assignment', None)
self.sub_graph_data_path_parameter_assignment = kwargs.get('sub_graph_data_path_parameter_assignment', None)
self.sub_graph_default_compute_target_nodes = kwargs.get('sub_graph_default_compute_target_nodes', None)
self.sub_graph_default_data_store_nodes = kwargs.get('sub_graph_default_data_store_nodes', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
class SubGraphParameterAssignment(msrest.serialization.Model):
"""SubGraphParameterAssignment.
:ivar parameter:
:vartype parameter: ~flow.models.Parameter
:ivar parameter_assignments:
:vartype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
"""
_attribute_map = {
'parameter': {'key': 'parameter', 'type': 'Parameter'},
'parameter_assignments': {'key': 'parameterAssignments', 'type': '[SubPipelineParameterAssignment]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword parameter:
:paramtype parameter: ~flow.models.Parameter
:keyword parameter_assignments:
:paramtype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
"""
super(SubGraphParameterAssignment, self).__init__(**kwargs)
self.parameter = kwargs.get('parameter', None)
self.parameter_assignments = kwargs.get('parameter_assignments', None)
class SubGraphPortInfo(msrest.serialization.Model):
"""SubGraphPortInfo.
:ivar name:
:vartype name: str
:ivar internal:
:vartype internal: list[~flow.models.SubGraphConnectionInfo]
:ivar external:
:vartype external: list[~flow.models.SubGraphConnectionInfo]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'internal': {'key': 'internal', 'type': '[SubGraphConnectionInfo]'},
'external': {'key': 'external', 'type': '[SubGraphConnectionInfo]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword internal:
:paramtype internal: list[~flow.models.SubGraphConnectionInfo]
:keyword external:
:paramtype external: list[~flow.models.SubGraphConnectionInfo]
"""
super(SubGraphPortInfo, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.internal = kwargs.get('internal', None)
self.external = kwargs.get('external', None)
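
# Illustrative sketch (not part of the generated surface): wiring a
# SubGraphPortInfo so that one internal node port is exposed through an
# external connection. The node ids and port names are placeholder
# assumptions.
def _example_sub_graph_port_info():
    return SubGraphPortInfo(
        name="training_data",  # assumed port name
        internal=[SubGraphConnectionInfo(node_id="inner-node-1", port_name="input")],
        external=[SubGraphConnectionInfo(node_id="outer-node-7", port_name="output")],
    )
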
class SubmitBulkRunRequest(msrest.serialization.Model):
"""SubmitBulkRunRequest.
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_definition_resource_id:
:vartype flow_definition_resource_id: str
:ivar flow_definition_data_store_name:
:vartype flow_definition_data_store_name: str
:ivar flow_definition_blob_path:
:vartype flow_definition_blob_path: str
:ivar flow_definition_data_uri:
:vartype flow_definition_data_uri: str
:ivar run_id:
:vartype run_id: str
:ivar run_display_name:
:vartype run_display_name: str
:ivar run_experiment_name:
:vartype run_experiment_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar node_variant:
:vartype node_variant: str
:ivar variant_run_id:
:vartype variant_run_id: str
:ivar baseline_run_id:
:vartype baseline_run_id: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar runtime_name:
:vartype runtime_name: str
:ivar session_id:
:vartype session_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar session_setup_mode: Possible values include: "ClientWait", "SystemWait".
:vartype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
:ivar output_data_store:
:vartype output_data_store: str
:ivar flow_lineage_id:
:vartype flow_lineage_id: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
_attribute_map = {
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'run_display_name': {'key': 'runDisplayName', 'type': 'str'},
'run_experiment_name': {'key': 'runExperimentName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'node_variant': {'key': 'nodeVariant', 'type': 'str'},
'variant_run_id': {'key': 'variantRunId', 'type': 'str'},
'baseline_run_id': {'key': 'baselineRunId', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'session_setup_mode': {'key': 'sessionSetupMode', 'type': 'str'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'flow_lineage_id': {'key': 'flowLineageId', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_definition_resource_id:
:paramtype flow_definition_resource_id: str
:keyword flow_definition_data_store_name:
:paramtype flow_definition_data_store_name: str
:keyword flow_definition_blob_path:
:paramtype flow_definition_blob_path: str
:keyword flow_definition_data_uri:
:paramtype flow_definition_data_uri: str
:keyword run_id:
:paramtype run_id: str
:keyword run_display_name:
:paramtype run_display_name: str
:keyword run_experiment_name:
:paramtype run_experiment_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword node_variant:
:paramtype node_variant: str
:keyword variant_run_id:
:paramtype variant_run_id: str
:keyword baseline_run_id:
:paramtype baseline_run_id: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword runtime_name:
:paramtype runtime_name: str
:keyword session_id:
:paramtype session_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword session_setup_mode: Possible values include: "ClientWait", "SystemWait".
:paramtype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
:keyword output_data_store:
:paramtype output_data_store: str
:keyword flow_lineage_id:
:paramtype flow_lineage_id: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
super(SubmitBulkRunRequest, self).__init__(**kwargs)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_definition_resource_id = kwargs.get('flow_definition_resource_id', None)
self.flow_definition_data_store_name = kwargs.get('flow_definition_data_store_name', None)
self.flow_definition_blob_path = kwargs.get('flow_definition_blob_path', None)
self.flow_definition_data_uri = kwargs.get('flow_definition_data_uri', None)
self.run_id = kwargs.get('run_id', None)
self.run_display_name = kwargs.get('run_display_name', None)
self.run_experiment_name = kwargs.get('run_experiment_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.properties = kwargs.get('properties', None)
self.node_variant = kwargs.get('node_variant', None)
self.variant_run_id = kwargs.get('variant_run_id', None)
self.baseline_run_id = kwargs.get('baseline_run_id', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.inputs_mapping = kwargs.get('inputs_mapping', None)
self.connections = kwargs.get('connections', None)
self.environment_variables = kwargs.get('environment_variables', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.session_id = kwargs.get('session_id', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.session_setup_mode = kwargs.get('session_setup_mode', None)
self.output_data_store = kwargs.get('output_data_store', None)
self.flow_lineage_id = kwargs.get('flow_lineage_id', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
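
# Illustrative sketch (placeholder values throughout): assembling a
# SubmitBulkRunRequest and serializing it to its wire shape. msrest's
# Model.serialize() maps the snake_case attributes to the camelCase keys
# declared in _attribute_map (e.g. run_display_name -> runDisplayName).
def _example_submit_bulk_run_request():
    request = SubmitBulkRunRequest(
        flow_definition_file_path="flows/classification/flow.dag.yaml",  # assumed path
        run_id="bulk-run-001",
        run_display_name="bulk run example",
        runtime_name="example-runtime",  # assumed runtime name
        inputs_mapping={"question": "${data.question}"},  # ${data.<column>} mapping syntax
        tags={"purpose": "demo"},
    )
    return request.serialize()  # dict with camelCase keys, ready to send
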
class SubmitBulkRunResponse(msrest.serialization.Model):
"""SubmitBulkRunResponse.
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:vartype action_type: str or ~flow.models.ActionType
:ivar flow_runs:
:vartype flow_runs: list[any]
:ivar node_runs:
:vartype node_runs: list[any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar flow_run_logs: Dictionary of :code:`<string>`.
:vartype flow_run_logs: dict[str, str]
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
    :ivar variant_run_to_evaluation_runs_id_mapping: This is a dictionary.
    :vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
_attribute_map = {
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
'flow_runs': {'key': 'flow_runs', 'type': '[object]'},
'node_runs': {'key': 'node_runs', 'type': '[object]'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:paramtype action_type: str or ~flow.models.ActionType
:keyword flow_runs:
:paramtype flow_runs: list[any]
:keyword node_runs:
:paramtype node_runs: list[any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword flow_run_logs: Dictionary of :code:`<string>`.
:paramtype flow_run_logs: dict[str, str]
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
        :keyword variant_run_to_evaluation_runs_id_mapping: This is a dictionary.
        :paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
super(SubmitBulkRunResponse, self).__init__(**kwargs)
self.next_action_interval_in_seconds = kwargs.get('next_action_interval_in_seconds', None)
self.action_type = kwargs.get('action_type', None)
self.flow_runs = kwargs.get('flow_runs', None)
self.node_runs = kwargs.get('node_runs', None)
self.error_response = kwargs.get('error_response', None)
self.flow_name = kwargs.get('flow_name', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_graph = kwargs.get('flow_graph', None)
self.flow_graph_layout = kwargs.get('flow_graph_layout', None)
self.flow_run_resource_id = kwargs.get('flow_run_resource_id', None)
self.bulk_test_id = kwargs.get('bulk_test_id', None)
self.batch_inputs = kwargs.get('batch_inputs', None)
self.batch_data_input = kwargs.get('batch_data_input', None)
self.created_by = kwargs.get('created_by', None)
self.created_on = kwargs.get('created_on', None)
self.flow_run_type = kwargs.get('flow_run_type', None)
self.flow_type = kwargs.get('flow_type', None)
self.runtime_name = kwargs.get('runtime_name', None)
self.aml_compute_name = kwargs.get('aml_compute_name', None)
self.flow_run_logs = kwargs.get('flow_run_logs', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.working_directory = kwargs.get('working_directory', None)
self.flow_dag_file_relative_path = kwargs.get('flow_dag_file_relative_path', None)
self.flow_snapshot_id = kwargs.get('flow_snapshot_id', None)
self.variant_run_to_evaluation_runs_id_mapping = kwargs.get('variant_run_to_evaluation_runs_id_mapping', None)
class SubmitFlowRequest(msrest.serialization.Model):
"""SubmitFlowRequest.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_id:
:vartype flow_id: str
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar async_submission:
:vartype async_submission: bool
:ivar use_workspace_connection:
:vartype use_workspace_connection: bool
:ivar use_flow_snapshot_to_submit:
:vartype use_flow_snapshot_to_submit: bool
:ivar enable_blob_run_artifacts:
:vartype enable_blob_run_artifacts: bool
:ivar enable_async_flow_test:
:vartype enable_async_flow_test: bool
:ivar flow_runtime_submission_api_version: Possible values include: "Version1", "Version2".
:vartype flow_runtime_submission_api_version: str or
~flow.models.FlowRuntimeSubmissionApiVersion
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'async_submission': {'key': 'asyncSubmission', 'type': 'bool'},
'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
'use_flow_snapshot_to_submit': {'key': 'useFlowSnapshotToSubmit', 'type': 'bool'},
'enable_blob_run_artifacts': {'key': 'enableBlobRunArtifacts', 'type': 'bool'},
'enable_async_flow_test': {'key': 'enableAsyncFlowTest', 'type': 'bool'},
'flow_runtime_submission_api_version': {'key': 'flowRuntimeSubmissionApiVersion', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_id:
:paramtype flow_id: str
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword async_submission:
:paramtype async_submission: bool
:keyword use_workspace_connection:
:paramtype use_workspace_connection: bool
:keyword use_flow_snapshot_to_submit:
:paramtype use_flow_snapshot_to_submit: bool
:keyword enable_blob_run_artifacts:
:paramtype enable_blob_run_artifacts: bool
:keyword enable_async_flow_test:
:paramtype enable_async_flow_test: bool
:keyword flow_runtime_submission_api_version: Possible values include: "Version1", "Version2".
:paramtype flow_runtime_submission_api_version: str or
~flow.models.FlowRuntimeSubmissionApiVersion
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
super(SubmitFlowRequest, self).__init__(**kwargs)
self.flow_run_id = kwargs.get('flow_run_id', None)
self.flow_run_display_name = kwargs.get('flow_run_display_name', None)
self.flow_id = kwargs.get('flow_id', None)
self.flow = kwargs.get('flow', None)
self.flow_submit_run_settings = kwargs.get('flow_submit_run_settings', None)
self.async_submission = kwargs.get('async_submission', None)
self.use_workspace_connection = kwargs.get('use_workspace_connection', None)
self.use_flow_snapshot_to_submit = kwargs.get('use_flow_snapshot_to_submit', None)
self.enable_blob_run_artifacts = kwargs.get('enable_blob_run_artifacts', None)
self.enable_async_flow_test = kwargs.get('enable_async_flow_test', None)
self.flow_runtime_submission_api_version = kwargs.get('flow_runtime_submission_api_version', None)
self.run_display_name_generation_type = kwargs.get('run_display_name_generation_type', None)
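
# Illustrative sketch: a minimal SubmitFlowRequest for an asynchronous
# submission. The caller is assumed to have already built `flow` as a
# ~flow.models.Flow instance; the run id is a placeholder.
def _example_submit_flow_request(flow):
    return SubmitFlowRequest(
        flow_run_id="flow-run-001",
        flow=flow,
        async_submission=True,
        use_workspace_connection=True,
    )
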
class SubmitPipelineRunRequest(msrest.serialization.Model):
"""SubmitPipelineRunRequest.
:ivar compute_target:
:vartype compute_target: str
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar enable_notification:
:vartype enable_notification: bool
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar display_name:
:vartype display_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_target:
:paramtype compute_target: str
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword enable_notification:
:paramtype enable_notification: bool
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword display_name:
:paramtype display_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(SubmitPipelineRunRequest, self).__init__(**kwargs)
self.compute_target = kwargs.get('compute_target', None)
self.flattened_sub_graphs = kwargs.get('flattened_sub_graphs', None)
self.step_tags = kwargs.get('step_tags', None)
self.experiment_name = kwargs.get('experiment_name', None)
self.pipeline_parameters = kwargs.get('pipeline_parameters', None)
self.data_path_assignments = kwargs.get('data_path_assignments', None)
self.data_set_definition_value_assignments = kwargs.get('data_set_definition_value_assignments', None)
self.asset_output_settings_assignments = kwargs.get('asset_output_settings_assignments', None)
self.enable_notification = kwargs.get('enable_notification', None)
self.sub_pipelines_info = kwargs.get('sub_pipelines_info', None)
self.display_name = kwargs.get('display_name', None)
self.run_id = kwargs.get('run_id', None)
self.parent_run_id = kwargs.get('parent_run_id', None)
self.graph = kwargs.get('graph', None)
self.pipeline_run_settings = kwargs.get('pipeline_run_settings', None)
self.module_node_run_settings = kwargs.get('module_node_run_settings', None)
self.module_node_ui_input_settings = kwargs.get('module_node_ui_input_settings', None)
self.tags = kwargs.get('tags', None)
self.continue_run_on_step_failure = kwargs.get('continue_run_on_step_failure', None)
self.description = kwargs.get('description', None)
self.properties = kwargs.get('properties', None)
self.enforce_rerun = kwargs.get('enforce_rerun', None)
self.dataset_access_modes = kwargs.get('dataset_access_modes', None)
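
# Illustrative sketch: a SubmitPipelineRunRequest carrying pipeline parameters
# over a prepared graph. `graph` is assumed to be a ~flow.models.GraphDraftEntity;
# the compute and experiment names are placeholder assumptions.
def _example_submit_pipeline_run_request(graph):
    return SubmitPipelineRunRequest(
        compute_target="cpu-cluster",  # assumed compute name
        experiment_name="example-experiment",
        pipeline_parameters={"learning_rate": "0.01"},  # string-valued, per the contract
        graph=graph,
        continue_run_on_step_failure=False,
    )
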
class SubPipelineDefinition(msrest.serialization.Model):
"""SubPipelineDefinition.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar default_compute_target:
:vartype default_compute_target: ~flow.models.ComputeSetting
:ivar default_data_store:
:vartype default_data_store: ~flow.models.DatastoreSetting
:ivar pipeline_function_name:
:vartype pipeline_function_name: str
:ivar id:
:vartype id: str
:ivar parent_definition_id:
:vartype parent_definition_id: str
:ivar from_module_name:
:vartype from_module_name: str
:ivar parameter_list:
:vartype parameter_list: list[~flow.models.Kwarg]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
'pipeline_function_name': {'key': 'pipelineFunctionName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'parent_definition_id': {'key': 'parentDefinitionId', 'type': 'str'},
'from_module_name': {'key': 'fromModuleName', 'type': 'str'},
'parameter_list': {'key': 'parameterList', 'type': '[Kwarg]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword default_compute_target:
:paramtype default_compute_target: ~flow.models.ComputeSetting
:keyword default_data_store:
:paramtype default_data_store: ~flow.models.DatastoreSetting
:keyword pipeline_function_name:
:paramtype pipeline_function_name: str
:keyword id:
:paramtype id: str
:keyword parent_definition_id:
:paramtype parent_definition_id: str
:keyword from_module_name:
:paramtype from_module_name: str
:keyword parameter_list:
:paramtype parameter_list: list[~flow.models.Kwarg]
"""
super(SubPipelineDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.default_compute_target = kwargs.get('default_compute_target', None)
self.default_data_store = kwargs.get('default_data_store', None)
self.pipeline_function_name = kwargs.get('pipeline_function_name', None)
self.id = kwargs.get('id', None)
self.parent_definition_id = kwargs.get('parent_definition_id', None)
self.from_module_name = kwargs.get('from_module_name', None)
self.parameter_list = kwargs.get('parameter_list', None)
class SubPipelineParameterAssignment(msrest.serialization.Model):
"""SubPipelineParameterAssignment.
:ivar node_id:
:vartype node_id: str
:ivar parameter_name:
:vartype parameter_name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword parameter_name:
:paramtype parameter_name: str
"""
super(SubPipelineParameterAssignment, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.parameter_name = kwargs.get('parameter_name', None)
class SubPipelinesInfo(msrest.serialization.Model):
"""SubPipelinesInfo.
:ivar sub_graph_info:
:vartype sub_graph_info: list[~flow.models.SubGraphInfo]
:ivar node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:vartype node_id_to_sub_graph_id_mapping: dict[str, str]
:ivar sub_pipeline_definition:
:vartype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
"""
_attribute_map = {
'sub_graph_info': {'key': 'subGraphInfo', 'type': '[SubGraphInfo]'},
'node_id_to_sub_graph_id_mapping': {'key': 'nodeIdToSubGraphIdMapping', 'type': '{str}'},
'sub_pipeline_definition': {'key': 'subPipelineDefinition', 'type': '[SubPipelineDefinition]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword sub_graph_info:
:paramtype sub_graph_info: list[~flow.models.SubGraphInfo]
:keyword node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:paramtype node_id_to_sub_graph_id_mapping: dict[str, str]
:keyword sub_pipeline_definition:
:paramtype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
"""
super(SubPipelinesInfo, self).__init__(**kwargs)
self.sub_graph_info = kwargs.get('sub_graph_info', None)
self.node_id_to_sub_graph_id_mapping = kwargs.get('node_id_to_sub_graph_id_mapping', None)
self.sub_pipeline_definition = kwargs.get('sub_pipeline_definition', None)
class SubStatusPeriod(msrest.serialization.Model):
"""SubStatusPeriod.
:ivar name:
:vartype name: str
:ivar sub_periods:
:vartype sub_periods: list[~flow.models.SubStatusPeriod]
:ivar start:
:vartype start: long
:ivar end:
:vartype end: long
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
'start': {'key': 'start', 'type': 'long'},
'end': {'key': 'end', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword sub_periods:
:paramtype sub_periods: list[~flow.models.SubStatusPeriod]
:keyword start:
:paramtype start: long
:keyword end:
:paramtype end: long
"""
super(SubStatusPeriod, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.sub_periods = kwargs.get('sub_periods', None)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
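
# Illustrative sketch: SubStatusPeriod is self-referential, so a period can
# nest finer-grained periods inside itself. The start/end offsets below are
# placeholder assumptions.
def _example_sub_status_period():
    return SubStatusPeriod(
        name="setup",
        start=0,
        end=120,
        sub_periods=[SubStatusPeriod(name="image pull", start=0, end=90)],
    )
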
class SweepEarlyTerminationPolicy(msrest.serialization.Model):
"""SweepEarlyTerminationPolicy.
:ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.EarlyTerminationPolicyType
:ivar evaluation_interval:
:vartype evaluation_interval: int
:ivar delay_evaluation:
:vartype delay_evaluation: int
:ivar slack_factor:
:vartype slack_factor: float
:ivar slack_amount:
:vartype slack_amount: float
:ivar truncation_percentage:
:vartype truncation_percentage: int
"""
_attribute_map = {
'policy_type': {'key': 'policyType', 'type': 'str'},
'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
'slack_factor': {'key': 'slackFactor', 'type': 'float'},
'slack_amount': {'key': 'slackAmount', 'type': 'float'},
'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword policy_type: Possible values include: "Bandit", "MedianStopping",
"TruncationSelection".
:paramtype policy_type: str or ~flow.models.EarlyTerminationPolicyType
:keyword evaluation_interval:
:paramtype evaluation_interval: int
:keyword delay_evaluation:
:paramtype delay_evaluation: int
:keyword slack_factor:
:paramtype slack_factor: float
:keyword slack_amount:
:paramtype slack_amount: float
:keyword truncation_percentage:
:paramtype truncation_percentage: int
"""
super(SweepEarlyTerminationPolicy, self).__init__(**kwargs)
self.policy_type = kwargs.get('policy_type', None)
self.evaluation_interval = kwargs.get('evaluation_interval', None)
self.delay_evaluation = kwargs.get('delay_evaluation', None)
self.slack_factor = kwargs.get('slack_factor', None)
self.slack_amount = kwargs.get('slack_amount', None)
self.truncation_percentage = kwargs.get('truncation_percentage', None)
class SweepSettings(msrest.serialization.Model):
"""SweepSettings.
:ivar limits:
:vartype limits: ~flow.models.SweepSettingsLimits
:ivar search_space:
:vartype search_space: list[dict[str, str]]
:ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:vartype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
:ivar early_termination:
:vartype early_termination: ~flow.models.SweepEarlyTerminationPolicy
"""
_attribute_map = {
'limits': {'key': 'limits', 'type': 'SweepSettingsLimits'},
'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
'early_termination': {'key': 'earlyTermination', 'type': 'SweepEarlyTerminationPolicy'},
}
def __init__(
self,
**kwargs
):
"""
:keyword limits:
:paramtype limits: ~flow.models.SweepSettingsLimits
:keyword search_space:
:paramtype search_space: list[dict[str, str]]
:keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:paramtype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
:keyword early_termination:
:paramtype early_termination: ~flow.models.SweepEarlyTerminationPolicy
"""
super(SweepSettings, self).__init__(**kwargs)
self.limits = kwargs.get('limits', None)
self.search_space = kwargs.get('search_space', None)
self.sampling_algorithm = kwargs.get('sampling_algorithm', None)
self.early_termination = kwargs.get('early_termination', None)
class SweepSettingsLimits(msrest.serialization.Model):
"""SweepSettingsLimits.
:ivar max_total_trials:
:vartype max_total_trials: int
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
"""
_attribute_map = {
'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword max_total_trials:
:paramtype max_total_trials: int
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
"""
super(SweepSettingsLimits, self).__init__(**kwargs)
self.max_total_trials = kwargs.get('max_total_trials', None)
self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None)
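
# Illustrative sketch: composing the three sweep models above -- a random
# search over two candidate learning rates, capped by SweepSettingsLimits and
# guarded by a bandit early-termination policy. All numeric choices are
# placeholder assumptions.
def _example_sweep_settings():
    return SweepSettings(
        limits=SweepSettingsLimits(max_total_trials=20, max_concurrent_trials=4),
        search_space=[{"learning_rate": "0.01"}, {"learning_rate": "0.001"}],
        sampling_algorithm="Random",
        early_termination=SweepEarlyTerminationPolicy(
            policy_type="Bandit",
            evaluation_interval=1,
            delay_evaluation=5,
            slack_factor=0.1,
        ),
    )
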
class SystemData(msrest.serialization.Model):
"""SystemData.
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar created_by:
:vartype created_by: str
:ivar created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:vartype created_by_type: str or ~flow.models.UserType
:ivar last_modified_at:
:vartype last_modified_at: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: str
:ivar last_modified_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:vartype last_modified_by_type: str or ~flow.models.UserType
"""
_attribute_map = {
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword created_by:
:paramtype created_by: str
:keyword created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:paramtype created_by_type: str or ~flow.models.UserType
:keyword last_modified_at:
:paramtype last_modified_at: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: str
:keyword last_modified_by_type: Possible values include: "User", "Application",
"ManagedIdentity", "Key".
:paramtype last_modified_by_type: str or ~flow.models.UserType
"""
super(SystemData, self).__init__(**kwargs)
self.created_at = kwargs.get('created_at', None)
self.created_by = kwargs.get('created_by', None)
self.created_by_type = kwargs.get('created_by_type', None)
self.last_modified_at = kwargs.get('last_modified_at', None)
self.last_modified_by = kwargs.get('last_modified_by', None)
self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
class SystemMeta(msrest.serialization.Model):
"""SystemMeta.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar content_hash:
:vartype content_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.SystemMetaExtraHashes
"""
_attribute_map = {
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'SystemMetaIdentifierHashes'},
'extra_hashes': {'key': 'extraHashes', 'type': 'SystemMetaExtraHashes'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword content_hash:
:paramtype content_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.SystemMetaExtraHashes
"""
super(SystemMeta, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.extra_hash = kwargs.get('extra_hash', None)
self.content_hash = kwargs.get('content_hash', None)
self.identifier_hashes = kwargs.get('identifier_hashes', None)
self.extra_hashes = kwargs.get('extra_hashes', None)
class SystemMetaExtraHashes(msrest.serialization.Model):
"""SystemMetaExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(SystemMetaExtraHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class SystemMetaIdentifierHashes(msrest.serialization.Model):
"""SystemMetaIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(SystemMetaIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = kwargs.get('identifier_hash', None)
self.identifier_hash_v2 = kwargs.get('identifier_hash_v2', None)
class TargetLags(msrest.serialization.Model):
"""TargetLags.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.TargetLagsMode
:ivar values:
:vartype values: list[int]
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.TargetLagsMode
:keyword values:
:paramtype values: list[int]
"""
super(TargetLags, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.values = kwargs.get('values', None)
class TargetRollingWindowSize(msrest.serialization.Model):
"""TargetRollingWindowSize.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.TargetRollingWindowSizeMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.TargetRollingWindowSizeMode
:keyword value:
:paramtype value: int
"""
super(TargetRollingWindowSize, self).__init__(**kwargs)
self.mode = kwargs.get('mode', None)
self.value = kwargs.get('value', None)
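
# Illustrative sketch: explicit ("Custom") settings built from the two
# forecasting models above -- lag the target by 1 and 2 periods over a rolling
# window of 4. "Auto" mode would presumably leave the values unset. The
# numbers are placeholder assumptions.
def _example_target_window_settings():
    lags = TargetLags(mode="Custom", values=[1, 2])
    window = TargetRollingWindowSize(mode="Custom", value=4)
    return lags, window
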
class TargetSelectorConfiguration(msrest.serialization.Model):
"""TargetSelectorConfiguration.
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
:ivar compute_type:
:vartype compute_type: str
:ivar instance_type:
:vartype instance_type: list[str]
:ivar instance_types:
:vartype instance_types: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar region:
:vartype region: list[str]
:ivar regions:
:vartype regions: list[str]
:ivar vc_block_list:
:vartype vc_block_list: list[str]
"""
_attribute_map = {
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': '[str]'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'regions': {'key': 'regions', 'type': '[str]'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
:keyword compute_type:
:paramtype compute_type: str
:keyword instance_type:
:paramtype instance_type: list[str]
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword region:
:paramtype region: list[str]
:keyword regions:
:paramtype regions: list[str]
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
"""
super(TargetSelectorConfiguration, self).__init__(**kwargs)
self.low_priority_vm_tolerant = kwargs.get('low_priority_vm_tolerant', None)
self.cluster_block_list = kwargs.get('cluster_block_list', None)
self.compute_type = kwargs.get('compute_type', None)
self.instance_type = kwargs.get('instance_type', None)
self.instance_types = kwargs.get('instance_types', None)
self.my_resource_only = kwargs.get('my_resource_only', None)
self.plan_id = kwargs.get('plan_id', None)
self.plan_region_id = kwargs.get('plan_region_id', None)
self.region = kwargs.get('region', None)
self.regions = kwargs.get('regions', None)
self.vc_block_list = kwargs.get('vc_block_list', None)
class Task(msrest.serialization.Model):
"""Task.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id:
:vartype id: int
:ivar exception: Anything.
:vartype exception: any
:ivar status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
"Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
:vartype status: str or ~flow.models.TaskStatus
:ivar is_canceled:
:vartype is_canceled: bool
:ivar is_completed:
:vartype is_completed: bool
:ivar is_completed_successfully:
:vartype is_completed_successfully: bool
:ivar creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
"AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
:vartype creation_options: str or ~flow.models.TaskCreationOptions
:ivar async_state: Anything.
:vartype async_state: any
:ivar is_faulted:
:vartype is_faulted: bool
"""
_validation = {
'id': {'readonly': True},
'exception': {'readonly': True},
'is_canceled': {'readonly': True},
'is_completed': {'readonly': True},
'is_completed_successfully': {'readonly': True},
'async_state': {'readonly': True},
'is_faulted': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'exception': {'key': 'exception', 'type': 'object'},
'status': {'key': 'status', 'type': 'str'},
'is_canceled': {'key': 'isCanceled', 'type': 'bool'},
'is_completed': {'key': 'isCompleted', 'type': 'bool'},
'is_completed_successfully': {'key': 'isCompletedSuccessfully', 'type': 'bool'},
'creation_options': {'key': 'creationOptions', 'type': 'str'},
'async_state': {'key': 'asyncState', 'type': 'object'},
'is_faulted': {'key': 'isFaulted', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
"Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
:paramtype status: str or ~flow.models.TaskStatus
:keyword creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
"AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
:paramtype creation_options: str or ~flow.models.TaskCreationOptions
"""
super(Task, self).__init__(**kwargs)
self.id = None
self.exception = None
self.status = kwargs.get('status', None)
self.is_canceled = None
self.is_completed = None
self.is_completed_successfully = None
self.creation_options = kwargs.get('creation_options', None)
self.async_state = None
self.is_faulted = None
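
# Illustrative note on the read-only fields above: the generated __init__
# pins every server-populated attribute to None, so only `status` and
# `creation_options` survive local construction -- the rest are filled in by
# deserialization of a service response.
def _example_task_readonly_fields():
    task = Task(status="Running", creation_options="LongRunning")
    assert task.id is None and task.is_completed is None  # server-populated only
    return task
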
class TaskControlFlowInfo(msrest.serialization.Model):
"""TaskControlFlowInfo.
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.ControlFlowType
:ivar iteration_index:
:vartype iteration_index: int
:ivar item_name:
:vartype item_name: str
:ivar parameters_overwritten: Dictionary of :code:`<string>`.
:vartype parameters_overwritten: dict[str, str]
:ivar is_reused:
:vartype is_reused: bool
"""
_attribute_map = {
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'iteration_index': {'key': 'iterationIndex', 'type': 'int'},
'item_name': {'key': 'itemName', 'type': 'str'},
'parameters_overwritten': {'key': 'parametersOverwritten', 'type': '{str}'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.ControlFlowType
:keyword iteration_index:
:paramtype iteration_index: int
:keyword item_name:
:paramtype item_name: str
:keyword parameters_overwritten: Dictionary of :code:`<string>`.
:paramtype parameters_overwritten: dict[str, str]
:keyword is_reused:
:paramtype is_reused: bool
"""
super(TaskControlFlowInfo, self).__init__(**kwargs)
self.control_flow_type = kwargs.get('control_flow_type', None)
self.iteration_index = kwargs.get('iteration_index', None)
self.item_name = kwargs.get('item_name', None)
self.parameters_overwritten = kwargs.get('parameters_overwritten', None)
self.is_reused = kwargs.get('is_reused', None)
class TaskReuseInfo(msrest.serialization.Model):
"""TaskReuseInfo.
:ivar experiment_id:
:vartype experiment_id: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar node_id:
:vartype node_id: str
:ivar request_id:
:vartype request_id: str
:ivar run_id:
:vartype run_id: str
:ivar node_start_time:
:vartype node_start_time: ~datetime.datetime
:ivar node_end_time:
:vartype node_end_time: ~datetime.datetime
"""
_attribute_map = {
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'node_start_time': {'key': 'nodeStartTime', 'type': 'iso-8601'},
'node_end_time': {'key': 'nodeEndTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword experiment_id:
:paramtype experiment_id: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword request_id:
:paramtype request_id: str
:keyword run_id:
:paramtype run_id: str
:keyword node_start_time:
:paramtype node_start_time: ~datetime.datetime
:keyword node_end_time:
:paramtype node_end_time: ~datetime.datetime
"""
super(TaskReuseInfo, self).__init__(**kwargs)
self.experiment_id = kwargs.get('experiment_id', None)
self.pipeline_run_id = kwargs.get('pipeline_run_id', None)
self.node_id = kwargs.get('node_id', None)
self.request_id = kwargs.get('request_id', None)
self.run_id = kwargs.get('run_id', None)
self.node_start_time = kwargs.get('node_start_time', None)
self.node_end_time = kwargs.get('node_end_time', None)
class TensorflowConfiguration(msrest.serialization.Model):
"""TensorflowConfiguration.
:ivar worker_count:
:vartype worker_count: int
:ivar parameter_server_count:
:vartype parameter_server_count: int
"""
_attribute_map = {
'worker_count': {'key': 'workerCount', 'type': 'int'},
'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword worker_count:
:paramtype worker_count: int
:keyword parameter_server_count:
:paramtype parameter_server_count: int
"""
super(TensorflowConfiguration, self).__init__(**kwargs)
self.worker_count = kwargs.get('worker_count', None)
self.parameter_server_count = kwargs.get('parameter_server_count', None)
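# Illustrative usage (hypothetical values, not part of the generated client):
# constructing a model and calling the msrest ``serialize()`` helper shows how
# the ``_attribute_map`` above renames snake_case attributes to the service's
# camelCase wire keys, omitting attributes left as None.
#
#     config = TensorflowConfiguration(worker_count=2, parameter_server_count=1)
#     config.serialize()  # -> {'workerCount': 2, 'parameterServerCount': 1}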
class TestDataSettings(msrest.serialization.Model):
"""TestDataSettings.
:ivar test_data_size:
:vartype test_data_size: float
"""
_attribute_map = {
'test_data_size': {'key': 'testDataSize', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
"""
:keyword test_data_size:
:paramtype test_data_size: float
"""
super(TestDataSettings, self).__init__(**kwargs)
self.test_data_size = kwargs.get('test_data_size', None)
class Tool(msrest.serialization.Model):
"""Tool.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:vartype type: str or ~flow.models.ToolType
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.InputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.OutputDefinition]
:ivar description:
:vartype description: str
:ivar connection_type:
:vartype connection_type: list[str or ~flow.models.ConnectionType]
:ivar module:
:vartype module: str
:ivar class_name:
:vartype class_name: str
:ivar source:
:vartype source: str
:ivar lkg_code:
:vartype lkg_code: str
:ivar code:
:vartype code: str
:ivar function:
:vartype function: str
:ivar action_type:
:vartype action_type: str
:ivar provider_config: This is a dictionary.
:vartype provider_config: dict[str, ~flow.models.InputDefinition]
:ivar function_config: This is a dictionary.
:vartype function_config: dict[str, ~flow.models.InputDefinition]
:ivar icon: Anything.
:vartype icon: any
:ivar category:
:vartype category: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, any]
:ivar is_builtin:
:vartype is_builtin: bool
:ivar package:
:vartype package: str
:ivar package_version:
:vartype package_version: str
:ivar default_prompt:
:vartype default_prompt: str
:ivar enable_kwargs:
:vartype enable_kwargs: bool
:ivar deprecated_tools:
:vartype deprecated_tools: list[str]
:ivar tool_state: Possible values include: "Stable", "Preview", "Deprecated".
:vartype tool_state: str or ~flow.models.ToolState
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{InputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{OutputDefinition}'},
'description': {'key': 'description', 'type': 'str'},
'connection_type': {'key': 'connection_type', 'type': '[str]'},
'module': {'key': 'module', 'type': 'str'},
'class_name': {'key': 'class_name', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'lkg_code': {'key': 'lkgCode', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'function': {'key': 'function', 'type': 'str'},
'action_type': {'key': 'action_type', 'type': 'str'},
'provider_config': {'key': 'provider_config', 'type': '{InputDefinition}'},
'function_config': {'key': 'function_config', 'type': '{InputDefinition}'},
'icon': {'key': 'icon', 'type': 'object'},
'category': {'key': 'category', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{object}'},
'is_builtin': {'key': 'is_builtin', 'type': 'bool'},
'package': {'key': 'package', 'type': 'str'},
'package_version': {'key': 'package_version', 'type': 'str'},
'default_prompt': {'key': 'default_prompt', 'type': 'str'},
'enable_kwargs': {'key': 'enable_kwargs', 'type': 'bool'},
'deprecated_tools': {'key': 'deprecated_tools', 'type': '[str]'},
'tool_state': {'key': 'tool_state', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:paramtype type: str or ~flow.models.ToolType
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.InputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.OutputDefinition]
:keyword description:
:paramtype description: str
:keyword connection_type:
:paramtype connection_type: list[str or ~flow.models.ConnectionType]
:keyword module:
:paramtype module: str
:keyword class_name:
:paramtype class_name: str
:keyword source:
:paramtype source: str
:keyword lkg_code:
:paramtype lkg_code: str
:keyword code:
:paramtype code: str
:keyword function:
:paramtype function: str
:keyword action_type:
:paramtype action_type: str
:keyword provider_config: This is a dictionary.
:paramtype provider_config: dict[str, ~flow.models.InputDefinition]
:keyword function_config: This is a dictionary.
:paramtype function_config: dict[str, ~flow.models.InputDefinition]
:keyword icon: Anything.
:paramtype icon: any
:keyword category:
:paramtype category: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, any]
:keyword is_builtin:
:paramtype is_builtin: bool
:keyword package:
:paramtype package: str
:keyword package_version:
:paramtype package_version: str
:keyword default_prompt:
:paramtype default_prompt: str
:keyword enable_kwargs:
:paramtype enable_kwargs: bool
:keyword deprecated_tools:
:paramtype deprecated_tools: list[str]
:keyword tool_state: Possible values include: "Stable", "Preview", "Deprecated".
:paramtype tool_state: str or ~flow.models.ToolState
"""
super(Tool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.inputs = kwargs.get('inputs', None)
self.outputs = kwargs.get('outputs', None)
self.description = kwargs.get('description', None)
self.connection_type = kwargs.get('connection_type', None)
self.module = kwargs.get('module', None)
self.class_name = kwargs.get('class_name', None)
self.source = kwargs.get('source', None)
self.lkg_code = kwargs.get('lkg_code', None)
self.code = kwargs.get('code', None)
self.function = kwargs.get('function', None)
self.action_type = kwargs.get('action_type', None)
self.provider_config = kwargs.get('provider_config', None)
self.function_config = kwargs.get('function_config', None)
self.icon = kwargs.get('icon', None)
self.category = kwargs.get('category', None)
self.tags = kwargs.get('tags', None)
self.is_builtin = kwargs.get('is_builtin', None)
self.package = kwargs.get('package', None)
self.package_version = kwargs.get('package_version', None)
self.default_prompt = kwargs.get('default_prompt', None)
self.enable_kwargs = kwargs.get('enable_kwargs', None)
self.deprecated_tools = kwargs.get('deprecated_tools', None)
self.tool_state = kwargs.get('tool_state', None)
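# Illustrative usage (hypothetical values): ``Tool`` mixes camelCase and
# snake_case wire keys in its ``_attribute_map``, so the serialized payload
# follows that map verbatim rather than a single casing convention.
#
#     tool = Tool(name="echo", type="python", description="Example tool entry",
#                 tags={"owner": "docs"}, is_builtin=False)
#     tool.serialize()  # keys: 'name', 'type', 'description', 'tags', 'is_builtin'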
class ToolFuncResponse(msrest.serialization.Model):
"""ToolFuncResponse.
:ivar result: Anything.
:vartype result: any
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
"""
_attribute_map = {
'result': {'key': 'result', 'type': 'object'},
'logs': {'key': 'logs', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword result: Anything.
:paramtype result: any
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
"""
super(ToolFuncResponse, self).__init__(**kwargs)
self.result = kwargs.get('result', None)
self.logs = kwargs.get('logs', None)
class ToolInputDynamicList(msrest.serialization.Model):
"""ToolInputDynamicList.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs:
:vartype func_kwargs: list[dict[str, any]]
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs:
:paramtype func_kwargs: list[dict[str, any]]
"""
super(ToolInputDynamicList, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
class ToolInputGeneratedBy(msrest.serialization.Model):
"""ToolInputGeneratedBy.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs:
:vartype func_kwargs: list[dict[str, any]]
:ivar reverse_func_path:
:vartype reverse_func_path: str
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
'reverse_func_path': {'key': 'reverse_func_path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs:
:paramtype func_kwargs: list[dict[str, any]]
:keyword reverse_func_path:
:paramtype reverse_func_path: str
"""
super(ToolInputGeneratedBy, self).__init__(**kwargs)
self.func_path = kwargs.get('func_path', None)
self.func_kwargs = kwargs.get('func_kwargs', None)
self.reverse_func_path = kwargs.get('reverse_func_path', None)
class ToolMetaDto(msrest.serialization.Model):
"""ToolMetaDto.
:ivar tools: This is a dictionary.
:vartype tools: dict[str, ~flow.models.Tool]
:ivar errors: This is a dictionary.
:vartype errors: dict[str, ~flow.models.ErrorResponse]
"""
_attribute_map = {
'tools': {'key': 'tools', 'type': '{Tool}'},
'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tools: This is a dictionary.
:paramtype tools: dict[str, ~flow.models.Tool]
:keyword errors: This is a dictionary.
:paramtype errors: dict[str, ~flow.models.ErrorResponse]
"""
super(ToolMetaDto, self).__init__(**kwargs)
self.tools = kwargs.get('tools', None)
self.errors = kwargs.get('errors', None)
class ToolSetting(msrest.serialization.Model):
"""ToolSetting.
:ivar providers:
:vartype providers: list[~flow.models.ProviderEntity]
"""
_attribute_map = {
'providers': {'key': 'providers', 'type': '[ProviderEntity]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword providers:
:paramtype providers: list[~flow.models.ProviderEntity]
"""
super(ToolSetting, self).__init__(**kwargs)
self.providers = kwargs.get('providers', None)
class ToolSourceMeta(msrest.serialization.Model):
"""ToolSourceMeta.
:ivar tool_type:
:vartype tool_type: str
"""
_attribute_map = {
'tool_type': {'key': 'tool_type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword tool_type:
:paramtype tool_type: str
"""
super(ToolSourceMeta, self).__init__(**kwargs)
self.tool_type = kwargs.get('tool_type', None)
class TorchDistributedConfiguration(msrest.serialization.Model):
"""TorchDistributedConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(TorchDistributedConfiguration, self).__init__(**kwargs)
self.process_count_per_node = kwargs.get('process_count_per_node', None)
class TrainingDiagnosticConfiguration(msrest.serialization.Model):
"""TrainingDiagnosticConfiguration.
:ivar job_heart_beat_timeout_seconds:
:vartype job_heart_beat_timeout_seconds: int
"""
_attribute_map = {
'job_heart_beat_timeout_seconds': {'key': 'jobHeartBeatTimeoutSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword job_heart_beat_timeout_seconds:
:paramtype job_heart_beat_timeout_seconds: int
"""
super(TrainingDiagnosticConfiguration, self).__init__(**kwargs)
self.job_heart_beat_timeout_seconds = kwargs.get('job_heart_beat_timeout_seconds', None)
class TrainingOutput(msrest.serialization.Model):
"""TrainingOutput.
:ivar training_output_type: Possible values include: "Metrics", "Model".
:vartype training_output_type: str or ~flow.models.TrainingOutputType
:ivar iteration:
:vartype iteration: int
:ivar metric:
:vartype metric: str
:ivar model_file:
:vartype model_file: str
"""
_attribute_map = {
'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
'iteration': {'key': 'iteration', 'type': 'int'},
'metric': {'key': 'metric', 'type': 'str'},
'model_file': {'key': 'modelFile', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.TrainingOutputType
:keyword iteration:
:paramtype iteration: int
:keyword metric:
:paramtype metric: str
:keyword model_file:
:paramtype model_file: str
"""
super(TrainingOutput, self).__init__(**kwargs)
self.training_output_type = kwargs.get('training_output_type', None)
self.iteration = kwargs.get('iteration', None)
self.metric = kwargs.get('metric', None)
self.model_file = kwargs.get('model_file', None)
class TrainingSettings(msrest.serialization.Model):
"""TrainingSettings.
:ivar block_list_models:
:vartype block_list_models: list[str]
:ivar allow_list_models:
:vartype allow_list_models: list[str]
:ivar enable_dnn_training:
:vartype enable_dnn_training: bool
:ivar enable_onnx_compatible_models:
:vartype enable_onnx_compatible_models: bool
:ivar stack_ensemble_settings:
:vartype stack_ensemble_settings: ~flow.models.StackEnsembleSettings
:ivar enable_stack_ensemble:
:vartype enable_stack_ensemble: bool
:ivar enable_vote_ensemble:
:vartype enable_vote_ensemble: bool
:ivar ensemble_model_download_timeout:
:vartype ensemble_model_download_timeout: str
:ivar enable_model_explainability:
:vartype enable_model_explainability: bool
:ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:vartype training_mode: str or ~flow.models.TabularTrainingMode
"""
_attribute_map = {
'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'},
'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'},
'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'},
'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'},
'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'},
'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
'training_mode': {'key': 'trainingMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword block_list_models:
:paramtype block_list_models: list[str]
:keyword allow_list_models:
:paramtype allow_list_models: list[str]
:keyword enable_dnn_training:
:paramtype enable_dnn_training: bool
:keyword enable_onnx_compatible_models:
:paramtype enable_onnx_compatible_models: bool
:keyword stack_ensemble_settings:
:paramtype stack_ensemble_settings: ~flow.models.StackEnsembleSettings
:keyword enable_stack_ensemble:
:paramtype enable_stack_ensemble: bool
:keyword enable_vote_ensemble:
:paramtype enable_vote_ensemble: bool
:keyword ensemble_model_download_timeout:
:paramtype ensemble_model_download_timeout: str
:keyword enable_model_explainability:
:paramtype enable_model_explainability: bool
:keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:paramtype training_mode: str or ~flow.models.TabularTrainingMode
"""
super(TrainingSettings, self).__init__(**kwargs)
self.block_list_models = kwargs.get('block_list_models', None)
self.allow_list_models = kwargs.get('allow_list_models', None)
self.enable_dnn_training = kwargs.get('enable_dnn_training', None)
self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', None)
self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None)
self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', None)
self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', None)
self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', None)
self.enable_model_explainability = kwargs.get('enable_model_explainability', None)
self.training_mode = kwargs.get('training_mode', None)
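# Illustrative usage (hypothetical values): enum-typed fields such as
# ``training_mode`` accept either the plain string or the corresponding
# ``~flow.models.TabularTrainingMode`` member; unset flags stay None and are
# dropped on serialization.
#
#     settings = TrainingSettings(enable_stack_ensemble=True,
#                                 enable_vote_ensemble=False,
#                                 training_mode="Auto")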
class TriggerAsyncOperationStatus(msrest.serialization.Model):
"""TriggerAsyncOperationStatus.
:ivar id:
:vartype id: str
:ivar operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate".
:vartype operation_type: str or ~flow.models.TriggerOperationType
:ivar provisioning_status: Possible values include: "Creating", "Updating", "Deleting",
"Succeeded", "Failed", "Canceled".
:vartype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
"EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
"ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed",
"MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther",
"RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb",
"PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound",
"MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout",
"Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge",
"RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable",
"ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency",
"UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
"UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
"ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates",
"InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired".
:vartype status_code: str or ~flow.models.HttpStatusCode
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'operation_type': {'key': 'operationType', 'type': 'str'},
'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'status_code': {'key': 'statusCode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword operation_type: Possible values include: "Create", "Update", "Delete",
"CreateOrUpdate".
:paramtype operation_type: str or ~flow.models.TriggerOperationType
:keyword provisioning_status: Possible values include: "Creating", "Updating", "Deleting",
"Succeeded", "Failed", "Canceled".
:paramtype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
"EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
"ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed",
"MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther",
"RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb",
"PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound",
"MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout",
"Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge",
"RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable",
"ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency",
"UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
"UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
"ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates",
"InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired".
:paramtype status_code: str or ~flow.models.HttpStatusCode
"""
super(TriggerAsyncOperationStatus, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.operation_type = kwargs.get('operation_type', None)
self.provisioning_status = kwargs.get('provisioning_status', None)
self.created_time = kwargs.get('created_time', None)
self.end_time = kwargs.get('end_time', None)
self.error = kwargs.get('error', None)
self.status_code = kwargs.get('status_code', None)
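# Illustrative usage (hypothetical values): polling code typically inspects
# ``provisioning_status`` and falls back to the attached ``ErrorResponse``
# when the trigger operation did not succeed.
#
#     status = TriggerAsyncOperationStatus(id="op-123", operation_type="Create",
#                                          provisioning_status="Failed")
#     if status.provisioning_status in ("Failed", "Canceled"):
#         details = status.error  # an ~flow.models.ErrorResponse, or None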
class TuningNodeSetting(msrest.serialization.Model):
"""TuningNodeSetting.
:ivar variant_ids:
:vartype variant_ids: list[str]
"""
_attribute_map = {
'variant_ids': {'key': 'variantIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword variant_ids:
:paramtype variant_ids: list[str]
"""
super(TuningNodeSetting, self).__init__(**kwargs)
self.variant_ids = kwargs.get('variant_ids', None)
class TypedAssetReference(msrest.serialization.Model):
"""TypedAssetReference.
:ivar asset_id:
:vartype asset_id: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'asset_id': {'key': 'assetId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword asset_id:
:paramtype asset_id: str
:keyword type:
:paramtype type: str
"""
super(TypedAssetReference, self).__init__(**kwargs)
self.asset_id = kwargs.get('asset_id', None)
self.type = kwargs.get('type', None)
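# Illustrative usage (hypothetical values): the msrest base class also supports
# the reverse direction, building a model from a wire-format dict via
# ``from_dict`` with the REST keys declared in ``_attribute_map``.
#
#     ref = TypedAssetReference.from_dict({'assetId': 'example-asset-id',
#                                          'type': 'uri_file'})
#     ref.asset_id  # -> 'example-asset-id'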
class UIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
"""UIAzureOpenAIDeploymentNameSelector.
:ivar capabilities:
:vartype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
"""
_attribute_map = {
'capabilities': {'key': 'Capabilities', 'type': 'UIAzureOpenAIModelCapabilities'},
}
def __init__(
self,
**kwargs
):
"""
:keyword capabilities:
:paramtype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
"""
super(UIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
self.capabilities = kwargs.get('capabilities', None)
class UIAzureOpenAIModelCapabilities(msrest.serialization.Model):
"""UIAzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'Completion', 'type': 'bool'},
'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
'embeddings': {'key': 'Embeddings', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(UIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = kwargs.get('completion', None)
self.chat_completion = kwargs.get('chat_completion', None)
self.embeddings = kwargs.get('embeddings', None)
class UIColumnPicker(msrest.serialization.Model):
"""UIColumnPicker.
:ivar column_picker_for:
:vartype column_picker_for: str
:ivar column_selection_categories:
:vartype column_selection_categories: list[str]
:ivar single_column_selection:
:vartype single_column_selection: bool
"""
_attribute_map = {
'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword column_picker_for:
:paramtype column_picker_for: str
:keyword column_selection_categories:
:paramtype column_selection_categories: list[str]
:keyword single_column_selection:
:paramtype single_column_selection: bool
"""
super(UIColumnPicker, self).__init__(**kwargs)
self.column_picker_for = kwargs.get('column_picker_for', None)
self.column_selection_categories = kwargs.get('column_selection_categories', None)
self.single_column_selection = kwargs.get('single_column_selection', None)
class UIComputeSelection(msrest.serialization.Model):
"""UIComputeSelection.
:ivar compute_types:
:vartype compute_types: list[str]
:ivar require_gpu:
:vartype require_gpu: bool
:ivar os_types:
:vartype os_types: list[str]
:ivar support_serverless:
:vartype support_serverless: bool
    :ivar compute_run_settings_mapping: This is a dictionary.
    :vartype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
"""
_attribute_map = {
'compute_types': {'key': 'computeTypes', 'type': '[str]'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'os_types': {'key': 'osTypes', 'type': '[str]'},
'support_serverless': {'key': 'supportServerless', 'type': 'bool'},
'compute_run_settings_mapping': {'key': 'computeRunSettingsMapping', 'type': '{[RunSettingParameter]}'},
}
def __init__(
self,
**kwargs
):
"""
:keyword compute_types:
:paramtype compute_types: list[str]
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword os_types:
:paramtype os_types: list[str]
:keyword support_serverless:
:paramtype support_serverless: bool
        :keyword compute_run_settings_mapping: This is a dictionary.
        :paramtype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
"""
super(UIComputeSelection, self).__init__(**kwargs)
self.compute_types = kwargs.get('compute_types', None)
self.require_gpu = kwargs.get('require_gpu', None)
self.os_types = kwargs.get('os_types', None)
self.support_serverless = kwargs.get('support_serverless', None)
self.compute_run_settings_mapping = kwargs.get('compute_run_settings_mapping', None)
class UIHyperparameterConfiguration(msrest.serialization.Model):
"""UIHyperparameterConfiguration.
    :ivar model_name_to_hyper_parameter_and_distribution_mapping: This is a dictionary.
    :vartype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
     list[str]]]
    :ivar distribution_parameters_mapping: This is a dictionary.
    :vartype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]]
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'model_name_to_hyper_parameter_and_distribution_mapping': {'key': 'modelNameToHyperParameterAndDistributionMapping', 'type': '{{[str]}}'},
'distribution_parameters_mapping': {'key': 'distributionParametersMapping', 'type': '{[DistributionParameter]}'},
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
        :keyword model_name_to_hyper_parameter_and_distribution_mapping: This is a dictionary.
        :paramtype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
         list[str]]]
        :keyword distribution_parameters_mapping: This is a dictionary.
        :paramtype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]]
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIHyperparameterConfiguration, self).__init__(**kwargs)
self.model_name_to_hyper_parameter_and_distribution_mapping = kwargs.get('model_name_to_hyper_parameter_and_distribution_mapping', None)
self.distribution_parameters_mapping = kwargs.get('distribution_parameters_mapping', None)
self.json_schema = kwargs.get('json_schema', None)
class UIInputSetting(msrest.serialization.Model):
"""UIInputSetting.
:ivar name:
:vartype name: str
:ivar data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
"Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
:vartype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_delivery_mode': {'key': 'dataDeliveryMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
"Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
:paramtype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(UIInputSetting, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.data_delivery_mode = kwargs.get('data_delivery_mode', None)
self.path_on_compute = kwargs.get('path_on_compute', None)
class UIJsonEditor(msrest.serialization.Model):
"""UIJsonEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIJsonEditor, self).__init__(**kwargs)
self.json_schema = kwargs.get('json_schema', None)
class UIParameterHint(msrest.serialization.Model):
"""UIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
:ivar column_picker:
:vartype column_picker: ~flow.models.UIColumnPicker
:ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:vartype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.UIJsonEditor
:ivar prompt_flow_connection_selector:
:vartype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
:ivar azure_open_ai_deployment_name_selector:
:vartype azure_open_ai_deployment_name_selector:
~flow.models.UIAzureOpenAIDeploymentNameSelector
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'column_picker': {'key': 'columnPicker', 'type': 'UIColumnPicker'},
'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'UIPromptFlowConnectionSelector'},
'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'UIAzureOpenAIDeploymentNameSelector'},
'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
'anonymous': {'key': 'Anonymous', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
:keyword column_picker:
:paramtype column_picker: ~flow.models.UIColumnPicker
:keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:paramtype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.UIJsonEditor
:keyword prompt_flow_connection_selector:
:paramtype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
:keyword azure_open_ai_deployment_name_selector:
:paramtype azure_open_ai_deployment_name_selector:
~flow.models.UIAzureOpenAIDeploymentNameSelector
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
"""
super(UIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
self.column_picker = kwargs.get('column_picker', None)
self.ui_script_language = kwargs.get('ui_script_language', None)
self.json_editor = kwargs.get('json_editor', None)
self.prompt_flow_connection_selector = kwargs.get('prompt_flow_connection_selector', None)
self.azure_open_ai_deployment_name_selector = kwargs.get('azure_open_ai_deployment_name_selector', None)
self.ux_ignore = kwargs.get('ux_ignore', None)
self.anonymous = kwargs.get('anonymous', None)
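# Illustrative usage (hypothetical values): widget hints compose the smaller
# UI models defined above, e.g. a column-picker hint nests a ``UIColumnPicker``.
#
#     hint = UIParameterHint(
#         ui_widget_type="ColumnPicker",
#         column_picker=UIColumnPicker(column_picker_for="dataset",
#                                      single_column_selection=True))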
class UIPromptFlowConnectionSelector(msrest.serialization.Model):
"""UIPromptFlowConnectionSelector.
:ivar prompt_flow_connection_type:
:vartype prompt_flow_connection_type: str
"""
_attribute_map = {
'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword prompt_flow_connection_type:
:paramtype prompt_flow_connection_type: str
"""
super(UIPromptFlowConnectionSelector, self).__init__(**kwargs)
self.prompt_flow_connection_type = kwargs.get('prompt_flow_connection_type', None)
class UIWidgetMetaInfo(msrest.serialization.Model):
"""UIWidgetMetaInfo.
:ivar module_node_id:
:vartype module_node_id: str
:ivar meta_module_id:
:vartype meta_module_id: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
"""
_attribute_map = {
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'meta_module_id': {'key': 'metaModuleId', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword module_node_id:
:paramtype module_node_id: str
:keyword meta_module_id:
:paramtype meta_module_id: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
"""
super(UIWidgetMetaInfo, self).__init__(**kwargs)
self.module_node_id = kwargs.get('module_node_id', None)
self.meta_module_id = kwargs.get('meta_module_id', None)
self.parameter_name = kwargs.get('parameter_name', None)
self.ui_widget_type = kwargs.get('ui_widget_type', None)
class UIYamlEditor(msrest.serialization.Model):
"""UIYamlEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIYamlEditor, self).__init__(**kwargs)
self.json_schema = kwargs.get('json_schema', None)
class UnversionedEntityRequestDto(msrest.serialization.Model):
"""UnversionedEntityRequestDto.
:ivar unversioned_entity_ids:
:vartype unversioned_entity_ids: list[str]
"""
_attribute_map = {
'unversioned_entity_ids': {'key': 'unversionedEntityIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
"""
:keyword unversioned_entity_ids:
:paramtype unversioned_entity_ids: list[str]
"""
super(UnversionedEntityRequestDto, self).__init__(**kwargs)
self.unversioned_entity_ids = kwargs.get('unversioned_entity_ids', None)
class UnversionedEntityResponseDto(msrest.serialization.Model):
"""UnversionedEntityResponseDto.
:ivar unversioned_entities:
:vartype unversioned_entities: list[~flow.models.FlowIndexEntity]
:ivar unversioned_entity_json_schema: Anything.
:vartype unversioned_entity_json_schema: any
:ivar normalized_request_charge:
:vartype normalized_request_charge: float
:ivar normalized_request_charge_period:
:vartype normalized_request_charge_period: str
"""
_attribute_map = {
'unversioned_entities': {'key': 'unversionedEntities', 'type': '[FlowIndexEntity]'},
'unversioned_entity_json_schema': {'key': 'unversionedEntityJsonSchema', 'type': 'object'},
'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword unversioned_entities:
:paramtype unversioned_entities: list[~flow.models.FlowIndexEntity]
:keyword unversioned_entity_json_schema: Anything.
:paramtype unversioned_entity_json_schema: any
:keyword normalized_request_charge:
:paramtype normalized_request_charge: float
:keyword normalized_request_charge_period:
:paramtype normalized_request_charge_period: str
"""
super(UnversionedEntityResponseDto, self).__init__(**kwargs)
self.unversioned_entities = kwargs.get('unversioned_entities', None)
self.unversioned_entity_json_schema = kwargs.get('unversioned_entity_json_schema', None)
self.normalized_request_charge = kwargs.get('normalized_request_charge', None)
self.normalized_request_charge_period = kwargs.get('normalized_request_charge_period', None)
class UnversionedRebuildIndexDto(msrest.serialization.Model):
"""UnversionedRebuildIndexDto.
:ivar continuation_token:
:vartype continuation_token: str
:ivar entity_count:
:vartype entity_count: int
:ivar entity_container_type:
:vartype entity_container_type: str
:ivar entity_type:
:vartype entity_type: str
:ivar resource_id:
:vartype resource_id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar immutable_resource_id:
:vartype immutable_resource_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'entity_count': {'key': 'entityCount', 'type': 'int'},
'entity_container_type': {'key': 'entityContainerType', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'immutable_resource_id': {'key': 'immutableResourceId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
"""
:keyword continuation_token:
:paramtype continuation_token: str
:keyword entity_count:
:paramtype entity_count: int
:keyword entity_container_type:
:paramtype entity_container_type: str
:keyword entity_type:
:paramtype entity_type: str
:keyword resource_id:
:paramtype resource_id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword immutable_resource_id:
:paramtype immutable_resource_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(UnversionedRebuildIndexDto, self).__init__(**kwargs)
self.continuation_token = kwargs.get('continuation_token', None)
self.entity_count = kwargs.get('entity_count', None)
self.entity_container_type = kwargs.get('entity_container_type', None)
self.entity_type = kwargs.get('entity_type', None)
self.resource_id = kwargs.get('resource_id', None)
self.workspace_id = kwargs.get('workspace_id', None)
self.immutable_resource_id = kwargs.get('immutable_resource_id', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
class UnversionedRebuildResponseDto(msrest.serialization.Model):
"""UnversionedRebuildResponseDto.
:ivar entities:
:vartype entities: ~flow.models.SegmentedResult1
:ivar unversioned_entity_schema: Anything.
:vartype unversioned_entity_schema: any
:ivar normalized_request_charge:
:vartype normalized_request_charge: float
:ivar normalized_request_charge_period:
:vartype normalized_request_charge_period: str
"""
_attribute_map = {
'entities': {'key': 'entities', 'type': 'SegmentedResult1'},
'unversioned_entity_schema': {'key': 'unversionedEntitySchema', 'type': 'object'},
'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword entities:
:paramtype entities: ~flow.models.SegmentedResult1
:keyword unversioned_entity_schema: Anything.
:paramtype unversioned_entity_schema: any
:keyword normalized_request_charge:
:paramtype normalized_request_charge: float
:keyword normalized_request_charge_period:
:paramtype normalized_request_charge_period: str
"""
super(UnversionedRebuildResponseDto, self).__init__(**kwargs)
self.entities = kwargs.get('entities', None)
self.unversioned_entity_schema = kwargs.get('unversioned_entity_schema', None)
self.normalized_request_charge = kwargs.get('normalized_request_charge', None)
self.normalized_request_charge_period = kwargs.get('normalized_request_charge_period', None)
class UpdateComponentRequest(msrest.serialization.Model):
"""UpdateComponentRequest.
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar module_update_operation_type: Possible values include: "SetDefaultVersion",
"EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
:vartype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
:ivar module_version:
:vartype module_version: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'module_update_operation_type': {'key': 'moduleUpdateOperationType', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword module_update_operation_type: Possible values include: "SetDefaultVersion",
"EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
:paramtype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
:keyword module_version:
:paramtype module_version: str
"""
super(UpdateComponentRequest, self).__init__(**kwargs)
self.display_name = kwargs.get('display_name', None)
self.description = kwargs.get('description', None)
self.tags = kwargs.get('tags', None)
self.module_update_operation_type = kwargs.get('module_update_operation_type', None)
self.module_version = kwargs.get('module_version', None)
class UpdateFlowRequest(msrest.serialization.Model):
"""UpdateFlowRequest.
:ivar flow_run_result:
:vartype flow_run_result: ~flow.models.FlowRunResult
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword flow_run_result:
:paramtype flow_run_result: ~flow.models.FlowRunResult
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(UpdateFlowRequest, self).__init__(**kwargs)
self.flow_run_result = kwargs.get('flow_run_result', None)
self.flow_test_mode = kwargs.get('flow_test_mode', None)
self.flow_test_infos = kwargs.get('flow_test_infos', None)
self.flow_name = kwargs.get('flow_name', None)
self.description = kwargs.get('description', None)
self.details = kwargs.get('details', None)
self.tags = kwargs.get('tags', None)
self.flow = kwargs.get('flow', None)
self.flow_definition_file_path = kwargs.get('flow_definition_file_path', None)
self.flow_type = kwargs.get('flow_type', None)
self.flow_run_settings = kwargs.get('flow_run_settings', None)
self.is_archived = kwargs.get('is_archived', None)
self.vm_size = kwargs.get('vm_size', None)
self.max_idle_time_seconds = kwargs.get('max_idle_time_seconds', None)
self.identity = kwargs.get('identity', None)
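# Illustrative usage (hypothetical values): all fields are optional, so a
# partial update only serializes the attributes that were actually set.
#
#     request = UpdateFlowRequest(flow_name="my-flow", flow_type="Chat",
#                                 tags={"stage": "dev"}, is_archived=False)
#     request.serialize()  # keys: 'flowName', 'flowType', 'tags', 'isArchived'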
class UpdateFlowRuntimeRequest(msrest.serialization.Model):
"""UpdateFlowRuntimeRequest.
:ivar runtime_description:
:vartype runtime_description: str
:ivar environment:
:vartype environment: str
:ivar instance_count:
:vartype instance_count: int
"""
_attribute_map = {
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
"""
:keyword runtime_description:
:paramtype runtime_description: str
:keyword environment:
:paramtype environment: str
:keyword instance_count:
:paramtype instance_count: int
"""
super(UpdateFlowRuntimeRequest, self).__init__(**kwargs)
self.runtime_description = kwargs.get('runtime_description', None)
self.environment = kwargs.get('environment', None)
self.instance_count = kwargs.get('instance_count', None)
class UpdateRegistryComponentRequest(msrest.serialization.Model):
"""UpdateRegistryComponentRequest.
:ivar registry_name:
:vartype registry_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar update_type: The only acceptable values to pass in are None and "SetDefaultVersion". The
default value is None.
:vartype update_type: str
"""
_attribute_map = {
'registry_name': {'key': 'registryName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'update_type': {'key': 'updateType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword registry_name:
:paramtype registry_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword update_type: The only acceptable values to pass in are None and "SetDefaultVersion".
The default value is None.
:paramtype update_type: str
"""
super(UpdateRegistryComponentRequest, self).__init__(**kwargs)
self.registry_name = kwargs.get('registry_name', None)
self.component_name = kwargs.get('component_name', None)
self.component_version = kwargs.get('component_version', None)
self.update_type = kwargs.get('update_type', None)
class UploadOptions(msrest.serialization.Model):
"""UploadOptions.
:ivar overwrite:
:vartype overwrite: bool
:ivar source_globs:
:vartype source_globs: ~flow.models.ExecutionGlobsOptions
"""
_attribute_map = {
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'source_globs': {'key': 'sourceGlobs', 'type': 'ExecutionGlobsOptions'},
}
def __init__(
self,
**kwargs
):
"""
:keyword overwrite:
:paramtype overwrite: bool
:keyword source_globs:
:paramtype source_globs: ~flow.models.ExecutionGlobsOptions
"""
super(UploadOptions, self).__init__(**kwargs)
self.overwrite = kwargs.get('overwrite', None)
self.source_globs = kwargs.get('source_globs', None)
class UriReference(msrest.serialization.Model):
"""UriReference.
:ivar path:
:vartype path: str
:ivar is_file:
:vartype is_file: bool
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'is_file': {'key': 'isFile', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword is_file:
:paramtype is_file: bool
"""
super(UriReference, self).__init__(**kwargs)
self.path = kwargs.get('path', None)
self.is_file = kwargs.get('is_file', None)
class User(msrest.serialization.Model):
"""User.
:ivar user_object_id: A user or service principal's object ID.
This is EUPI and may only be logged to warm path telemetry.
:vartype user_object_id: str
:ivar user_pu_id: A user or service principal's PuID.
This is PII and should never be logged.
:vartype user_pu_id: str
    :ivar user_idp: A user's identity provider, e.g. live.com.
This is PII and should never be logged.
:vartype user_idp: str
    :ivar user_alt_sec_id: A user's alternate security ID, which represents the user in a
     different identity provider system, e.g. 1:live.com:puid.
This is PII and should never be logged.
:vartype user_alt_sec_id: str
    :ivar user_iss: The issuer which issued the token for this user.
This is PII and should never be logged.
:vartype user_iss: str
:ivar user_tenant_id: A user or service principal's tenant ID.
:vartype user_tenant_id: str
:ivar user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged.
:vartype user_name: str
    :ivar upn: A user's principal name (UPN).
     This is PII and should never be logged.
:vartype upn: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_pu_id': {'key': 'userPuId', 'type': 'str'},
'user_idp': {'key': 'userIdp', 'type': 'str'},
'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'},
'user_iss': {'key': 'userIss', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'upn': {'key': 'upn', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword user_object_id: A user or service principal's object ID.
This is EUPI and may only be logged to warm path telemetry.
:paramtype user_object_id: str
:keyword user_pu_id: A user or service principal's PuID.
This is PII and should never be logged.
:paramtype user_pu_id: str
        :keyword user_idp: A user's identity provider, e.g. live.com.
This is PII and should never be logged.
:paramtype user_idp: str
        :keyword user_alt_sec_id: A user's alternate security ID, which represents the user in a
         different identity provider system, e.g. 1:live.com:puid.
This is PII and should never be logged.
:paramtype user_alt_sec_id: str
        :keyword user_iss: The issuer which issued the token for this user.
This is PII and should never be logged.
:paramtype user_iss: str
:keyword user_tenant_id: A user or service principal's tenant ID.
:paramtype user_tenant_id: str
:keyword user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged.
:paramtype user_name: str
        :keyword upn: A user's principal name (UPN).
         This is PII and should never be logged.
:paramtype upn: str
"""
super(User, self).__init__(**kwargs)
self.user_object_id = kwargs.get('user_object_id', None)
self.user_pu_id = kwargs.get('user_pu_id', None)
self.user_idp = kwargs.get('user_idp', None)
self.user_alt_sec_id = kwargs.get('user_alt_sec_id', None)
self.user_iss = kwargs.get('user_iss', None)
self.user_tenant_id = kwargs.get('user_tenant_id', None)
self.user_name = kwargs.get('user_name', None)
self.upn = kwargs.get('upn', None)
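# Illustrative usage (hypothetical values): per the field docs above, every
# attribute except user_object_id (EUPI, warm-path telemetry only) and
# user_tenant_id is PII, so callers should avoid logging serialized ``User``
# instances wholesale.
#
#     user = User(user_object_id="00000000-0000-0000-0000-000000000000",
#                 user_tenant_id="00000000-0000-0000-0000-000000000000")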
class UserAssignedIdentity(msrest.serialization.Model):
"""UserAssignedIdentity.
:ivar principal_id:
:vartype principal_id: str
:ivar client_id:
:vartype client_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword principal_id:
:paramtype principal_id: str
:keyword client_id:
:paramtype client_id: str
"""
super(UserAssignedIdentity, self).__init__(**kwargs)
self.principal_id = kwargs.get('principal_id', None)
self.client_id = kwargs.get('client_id', None)
class ValidationDataSettings(msrest.serialization.Model):
"""ValidationDataSettings.
:ivar n_cross_validations:
:vartype n_cross_validations: ~flow.models.NCrossValidations
:ivar validation_data_size:
:vartype validation_data_size: float
:ivar cv_split_column_names:
:vartype cv_split_column_names: list[str]
:ivar validation_type:
:vartype validation_type: str
"""
_attribute_map = {
'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'},
'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
'validation_type': {'key': 'validationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword n_cross_validations:
:paramtype n_cross_validations: ~flow.models.NCrossValidations
:keyword validation_data_size:
:paramtype validation_data_size: float
:keyword cv_split_column_names:
:paramtype cv_split_column_names: list[str]
:keyword validation_type:
:paramtype validation_type: str
"""
super(ValidationDataSettings, self).__init__(**kwargs)
self.n_cross_validations = kwargs.get('n_cross_validations', None)
self.validation_data_size = kwargs.get('validation_data_size', None)
self.cv_split_column_names = kwargs.get('cv_split_column_names', None)
self.validation_type = kwargs.get('validation_type', None)
class VariantNode(msrest.serialization.Model):
"""VariantNode.
:ivar node:
:vartype node: ~flow.models.Node
:ivar description:
:vartype description: str
"""
_attribute_map = {
'node': {'key': 'node', 'type': 'Node'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node:
:paramtype node: ~flow.models.Node
:keyword description:
:paramtype description: str
"""
super(VariantNode, self).__init__(**kwargs)
self.node = kwargs.get('node', None)
self.description = kwargs.get('description', None)
class Webhook(msrest.serialization.Model):
"""Webhook.
:ivar webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The
default value is None.
:vartype webhook_type: str
:ivar event_type:
:vartype event_type: str
"""
_attribute_map = {
'webhook_type': {'key': 'webhookType', 'type': 'str'},
'event_type': {'key': 'eventType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The
default value is None.
:paramtype webhook_type: str
:keyword event_type:
:paramtype event_type: str
"""
super(Webhook, self).__init__(**kwargs)
self.webhook_type = kwargs.get('webhook_type', None)
self.event_type = kwargs.get('event_type', None)
class WebServiceComputeMetaInfo(msrest.serialization.Model):
"""WebServiceComputeMetaInfo.
:ivar node_count:
:vartype node_count: int
:ivar is_ssl_enabled:
:vartype is_ssl_enabled: bool
:ivar aks_not_found:
:vartype aks_not_found: bool
:ivar cluster_purpose:
:vartype cluster_purpose: str
:ivar public_ip_address:
:vartype public_ip_address: str
:ivar vm_size:
:vartype vm_size: str
:ivar location:
:vartype location: str
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar state:
:vartype state: str
:ivar os_type:
:vartype os_type: str
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'node_count': {'key': 'nodeCount', 'type': 'int'},
'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
'aks_not_found': {'key': 'aksNotFound', 'type': 'bool'},
'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_count:
:paramtype node_count: int
:keyword is_ssl_enabled:
:paramtype is_ssl_enabled: bool
:keyword aks_not_found:
:paramtype aks_not_found: bool
:keyword cluster_purpose:
:paramtype cluster_purpose: str
:keyword public_ip_address:
:paramtype public_ip_address: str
:keyword vm_size:
:paramtype vm_size: str
:keyword location:
:paramtype location: str
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword state:
:paramtype state: str
:keyword os_type:
:paramtype os_type: str
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(WebServiceComputeMetaInfo, self).__init__(**kwargs)
self.node_count = kwargs.get('node_count', None)
self.is_ssl_enabled = kwargs.get('is_ssl_enabled', None)
self.aks_not_found = kwargs.get('aks_not_found', None)
self.cluster_purpose = kwargs.get('cluster_purpose', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.vm_size = kwargs.get('vm_size', None)
self.location = kwargs.get('location', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.state = kwargs.get('state', None)
self.os_type = kwargs.get('os_type', None)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.created_by_studio = kwargs.get('created_by_studio', None)
self.is_gpu_type = kwargs.get('is_gpu_type', None)
self.resource_id = kwargs.get('resource_id', None)
self.compute_type = kwargs.get('compute_type', None)
class WebServicePort(msrest.serialization.Model):
"""WebServicePort.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar name:
:vartype name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword name:
:paramtype name: str
"""
super(WebServicePort, self).__init__(**kwargs)
self.node_id = kwargs.get('node_id', None)
self.port_name = kwargs.get('port_name', None)
self.name = kwargs.get('name', None)
class WorkspaceConnectionSpec(msrest.serialization.Model):
"""WorkspaceConnectionSpec.
:ivar connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3",
"Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb",
"AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService",
"CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi",
"AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi",
"AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2",
"Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb",
"MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto",
"SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata",
"Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible",
"FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp",
"GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics",
"DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo",
"Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce",
"SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc",
"ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho",
"GenericContainerRegistry".
:vartype connection_category: str or ~flow.models.ConnectionCategory
:ivar flow_value_type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:vartype flow_value_type: str or ~flow.models.ValueType
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_type_display_name:
:vartype connection_type_display_name: str
:ivar config_specs:
:vartype config_specs: list[~flow.models.ConnectionConfigSpec]
:ivar module:
:vartype module: str
"""
_attribute_map = {
'connection_category': {'key': 'connectionCategory', 'type': 'str'},
'flow_value_type': {'key': 'flowValueType', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_type_display_name': {'key': 'connectionTypeDisplayName', 'type': 'str'},
'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
'module': {'key': 'module', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
"""
:keyword connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3",
"Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb",
"AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService",
"CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi",
"AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi",
"AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2",
"Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb",
"MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto",
"SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata",
"Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible",
"FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp",
"GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics",
"DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo",
"Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce",
"SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc",
"ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho",
"GenericContainerRegistry".
:paramtype connection_category: str or ~flow.models.ConnectionCategory
:keyword flow_value_type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:paramtype flow_value_type: str or ~flow.models.ValueType
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_type_display_name:
:paramtype connection_type_display_name: str
:keyword config_specs:
:paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
:keyword module:
:paramtype module: str
"""
super(WorkspaceConnectionSpec, self).__init__(**kwargs)
self.connection_category = kwargs.get('connection_category', None)
self.flow_value_type = kwargs.get('flow_value_type', None)
self.connection_type = kwargs.get('connection_type', None)
self.connection_type_display_name = kwargs.get('connection_type_display_name', None)
self.config_specs = kwargs.get('config_specs', None)
self.module = kwargs.get('module', None)
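# Illustrative sketch: a WorkspaceConnectionSpec for an Azure OpenAI connection,
# using values drawn from the enumerations documented above; the module value is
# a hypothetical placeholder.
#
#     spec = WorkspaceConnectionSpec(
#         connection_category="AzureOpenAI",
#         flow_value_type="AzureOpenAIConnection",
#         connection_type="AzureOpenAI",
#         connection_type_display_name="Azure OpenAI",
#         module="promptflow.connections",
#     )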
# ---------------------------------------------------------------------------
# File: promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_models_py3.py
# ---------------------------------------------------------------------------
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Dict, IO, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._azure_machine_learning_designer_service_client_enums import *
class ACIAdvanceSettings(msrest.serialization.Model):
"""ACIAdvanceSettings.
:ivar container_resource_requirements:
:vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar ssl_enabled:
:vartype ssl_enabled: bool
:ivar ssl_certificate:
:vartype ssl_certificate: str
:ivar ssl_key:
:vartype ssl_key: str
:ivar c_name:
:vartype c_name: str
:ivar dns_name_label:
:vartype dns_name_label: str
"""
_attribute_map = {
'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
'ssl_key': {'key': 'sslKey', 'type': 'str'},
'c_name': {'key': 'cName', 'type': 'str'},
'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
}
def __init__(
self,
*,
container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
app_insights_enabled: Optional[bool] = None,
ssl_enabled: Optional[bool] = None,
ssl_certificate: Optional[str] = None,
ssl_key: Optional[str] = None,
c_name: Optional[str] = None,
dns_name_label: Optional[str] = None,
**kwargs
):
"""
:keyword container_resource_requirements:
:paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword ssl_enabled:
:paramtype ssl_enabled: bool
:keyword ssl_certificate:
:paramtype ssl_certificate: str
:keyword ssl_key:
:paramtype ssl_key: str
:keyword c_name:
:paramtype c_name: str
:keyword dns_name_label:
:paramtype dns_name_label: str
"""
super(ACIAdvanceSettings, self).__init__(**kwargs)
self.container_resource_requirements = container_resource_requirements
self.app_insights_enabled = app_insights_enabled
self.ssl_enabled = ssl_enabled
self.ssl_certificate = ssl_certificate
self.ssl_key = ssl_key
self.c_name = c_name
self.dns_name_label = dns_name_label
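# Illustrative sketch: unlike the **kwargs-based models in _models.py, the
# models in this _models_py3.py module declare typed, keyword-only parameters
# (note the bare * in __init__), so type checkers can validate construction:
#
#     settings = ACIAdvanceSettings(
#         app_insights_enabled=True,
#         ssl_enabled=False,
#         dns_name_label="my-endpoint",  # hypothetical DNS label
#     )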
class Activate(msrest.serialization.Model):
"""Activate.
:ivar when:
:vartype when: str
:ivar is_property: Anything.
:vartype is_property: any
"""
_attribute_map = {
'when': {'key': 'when', 'type': 'str'},
'is_property': {'key': 'is', 'type': 'object'},
}
def __init__(
self,
*,
when: Optional[str] = None,
is_property: Optional[Any] = None,
**kwargs
):
"""
:keyword when:
:paramtype when: str
:keyword is_property: Anything.
:paramtype is_property: any
"""
super(Activate, self).__init__(**kwargs)
self.when = when
self.is_property = is_property
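# Illustrative sketch: the Python attribute is_property maps to the wire key
# "is" (a Python keyword, hence the renamed attribute), so serializing
#
#     Activate(when="${node.output}", is_property=True).serialize()
#
# would be expected to yield {'when': '${node.output}', 'is': True}. The
# "${node.output}" string is a hypothetical activate-condition expression.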
class AdditionalErrorInfo(msrest.serialization.Model):
"""AdditionalErrorInfo.
:ivar type:
:vartype type: str
:ivar info: Anything.
:vartype info: any
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'object'},
}
def __init__(
self,
*,
type: Optional[str] = None,
info: Optional[Any] = None,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword info: Anything.
:paramtype info: any
"""
super(AdditionalErrorInfo, self).__init__(**kwargs)
self.type = type
self.info = info
class AdhocTriggerScheduledCommandJobRequest(msrest.serialization.Model):
"""AdhocTriggerScheduledCommandJobRequest.
:ivar job_name:
:vartype job_name: str
:ivar job_display_name:
:vartype job_display_name: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
"""
_attribute_map = {
'job_name': {'key': 'jobName', 'type': 'str'},
'job_display_name': {'key': 'jobDisplayName', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
}
def __init__(
self,
*,
job_name: Optional[str] = None,
job_display_name: Optional[str] = None,
trigger_time_string: Optional[str] = None,
**kwargs
):
"""
:keyword job_name:
:paramtype job_name: str
:keyword job_display_name:
:paramtype job_display_name: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
"""
super(AdhocTriggerScheduledCommandJobRequest, self).__init__(**kwargs)
self.job_name = job_name
self.job_display_name = job_display_name
self.trigger_time_string = trigger_time_string
class AdhocTriggerScheduledSparkJobRequest(msrest.serialization.Model):
"""AdhocTriggerScheduledSparkJobRequest.
:ivar job_name:
:vartype job_name: str
:ivar job_display_name:
:vartype job_display_name: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
"""
_attribute_map = {
'job_name': {'key': 'jobName', 'type': 'str'},
'job_display_name': {'key': 'jobDisplayName', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
}
def __init__(
self,
*,
job_name: Optional[str] = None,
job_display_name: Optional[str] = None,
trigger_time_string: Optional[str] = None,
**kwargs
):
"""
:keyword job_name:
:paramtype job_name: str
:keyword job_display_name:
:paramtype job_display_name: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
"""
super(AdhocTriggerScheduledSparkJobRequest, self).__init__(**kwargs)
self.job_name = job_name
self.job_display_name = job_display_name
self.trigger_time_string = trigger_time_string
class AetherAmlDataset(msrest.serialization.Model):
"""AetherAmlDataset.
:ivar registered_data_set_reference:
:vartype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'AetherRegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
registered_data_set_reference: Optional["AetherRegisteredDataSetReference"] = None,
saved_data_set_reference: Optional["AetherSavedDataSetReference"] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword registered_data_set_reference:
:paramtype registered_data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherAmlDataset, self).__init__(**kwargs)
self.registered_data_set_reference = registered_data_set_reference
self.saved_data_set_reference = saved_data_set_reference
self.additional_transformations = additional_transformations
class AetherAmlSparkCloudSetting(msrest.serialization.Model):
"""AetherAmlSparkCloudSetting.
:ivar entry:
:vartype entry: ~flow.models.AetherEntrySetting
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar inline_environment_definition_string:
:vartype inline_environment_definition_string: str
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar compute:
:vartype compute: str
:ivar resources:
:vartype resources: ~flow.models.AetherResourcesSetting
:ivar identity:
:vartype identity: ~flow.models.AetherIdentitySetting
"""
_attribute_map = {
'entry': {'key': 'entry', 'type': 'AetherEntrySetting'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
'conf': {'key': 'conf', 'type': '{str}'},
'compute': {'key': 'compute', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'AetherResourcesSetting'},
'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
}
def __init__(
self,
*,
entry: Optional["AetherEntrySetting"] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
jars: Optional[List[str]] = None,
py_files: Optional[List[str]] = None,
driver_memory: Optional[str] = None,
driver_cores: Optional[int] = None,
executor_memory: Optional[str] = None,
executor_cores: Optional[int] = None,
number_executors: Optional[int] = None,
environment_asset_id: Optional[str] = None,
environment_variables: Optional[Dict[str, str]] = None,
inline_environment_definition_string: Optional[str] = None,
conf: Optional[Dict[str, str]] = None,
compute: Optional[str] = None,
resources: Optional["AetherResourcesSetting"] = None,
identity: Optional["AetherIdentitySetting"] = None,
**kwargs
):
"""
:keyword entry:
:paramtype entry: ~flow.models.AetherEntrySetting
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword inline_environment_definition_string:
:paramtype inline_environment_definition_string: str
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword compute:
:paramtype compute: str
:keyword resources:
:paramtype resources: ~flow.models.AetherResourcesSetting
:keyword identity:
:paramtype identity: ~flow.models.AetherIdentitySetting
"""
super(AetherAmlSparkCloudSetting, self).__init__(**kwargs)
self.entry = entry
self.files = files
self.archives = archives
self.jars = jars
self.py_files = py_files
self.driver_memory = driver_memory
self.driver_cores = driver_cores
self.executor_memory = executor_memory
self.executor_cores = executor_cores
self.number_executors = number_executors
self.environment_asset_id = environment_asset_id
self.environment_variables = environment_variables
self.inline_environment_definition_string = inline_environment_definition_string
self.conf = conf
self.compute = compute
self.resources = resources
self.identity = identity
class AetherAPCloudConfiguration(msrest.serialization.Model):
"""AetherAPCloudConfiguration.
:ivar referenced_ap_module_guid:
:vartype referenced_ap_module_guid: str
:ivar user_alias:
:vartype user_alias: str
:ivar aether_module_type:
:vartype aether_module_type: str
"""
_attribute_map = {
'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
}
def __init__(
self,
*,
referenced_ap_module_guid: Optional[str] = None,
user_alias: Optional[str] = None,
aether_module_type: Optional[str] = None,
**kwargs
):
"""
:keyword referenced_ap_module_guid:
:paramtype referenced_ap_module_guid: str
:keyword user_alias:
:paramtype user_alias: str
:keyword aether_module_type:
:paramtype aether_module_type: str
"""
super(AetherAPCloudConfiguration, self).__init__(**kwargs)
self.referenced_ap_module_guid = referenced_ap_module_guid
self.user_alias = user_alias
self.aether_module_type = aether_module_type
class AetherArgumentAssignment(msrest.serialization.Model):
"""AetherArgumentAssignment.
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:vartype value_type: str or ~flow.models.AetherArgumentValueType
:ivar value:
:vartype value: str
:ivar nested_argument_list:
:vartype nested_argument_list: list[~flow.models.AetherArgumentAssignment]
:ivar string_interpolation_argument_list:
:vartype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[AetherArgumentAssignment]'},
'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
*,
value_type: Optional[Union[str, "AetherArgumentValueType"]] = None,
value: Optional[str] = None,
nested_argument_list: Optional[List["AetherArgumentAssignment"]] = None,
string_interpolation_argument_list: Optional[List["AetherArgumentAssignment"]] = None,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:paramtype value_type: str or ~flow.models.AetherArgumentValueType
:keyword value:
:paramtype value: str
:keyword nested_argument_list:
:paramtype nested_argument_list: list[~flow.models.AetherArgumentAssignment]
:keyword string_interpolation_argument_list:
:paramtype string_interpolation_argument_list: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherArgumentAssignment, self).__init__(**kwargs)
self.value_type = value_type
self.value = value
self.nested_argument_list = nested_argument_list
self.string_interpolation_argument_list = string_interpolation_argument_list
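# Illustrative sketch: AetherArgumentAssignment is recursive, so a
# "StringInterpolationList" value is built from nested assignments; the input
# name below is a hypothetical placeholder.
#
#     arg = AetherArgumentAssignment(
#         value_type="StringInterpolationList",
#         string_interpolation_argument_list=[
#             AetherArgumentAssignment(value_type="Literal", value="--data "),
#             AetherArgumentAssignment(value_type="Input", value="training_data"),
#         ],
#     )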
class AetherAssetDefinition(msrest.serialization.Model):
"""AetherAssetDefinition.
:ivar path:
:vartype path: str
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AetherAssetType
:ivar asset_id:
:vartype asset_id: str
:ivar initial_asset_id:
:vartype initial_asset_id: str
:ivar serialized_asset_id:
:vartype serialized_asset_id: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'initial_asset_id': {'key': 'initialAssetId', 'type': 'str'},
'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
}
def __init__(
self,
*,
path: Optional[str] = None,
type: Optional[Union[str, "AetherAssetType"]] = None,
asset_id: Optional[str] = None,
initial_asset_id: Optional[str] = None,
serialized_asset_id: Optional[str] = None,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AetherAssetType
:keyword asset_id:
:paramtype asset_id: str
:keyword initial_asset_id:
:paramtype initial_asset_id: str
:keyword serialized_asset_id:
:paramtype serialized_asset_id: str
"""
super(AetherAssetDefinition, self).__init__(**kwargs)
self.path = path
self.type = type
self.asset_id = asset_id
self.initial_asset_id = initial_asset_id
self.serialized_asset_id = serialized_asset_id
class AetherAssetOutputSettings(msrest.serialization.Model):
"""AetherAssetOutputSettings.
:ivar path:
:vartype path: str
:ivar path_parameter_assignment:
:vartype path_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AetherAssetType
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'AetherParameterAssignment'},
'type': {'key': 'type', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
path: Optional[str] = None,
path_parameter_assignment: Optional["AetherParameterAssignment"] = None,
type: Optional[Union[str, "AetherAssetType"]] = None,
options: Optional[Dict[str, str]] = None,
data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword path_parameter_assignment:
:paramtype path_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AetherAssetType
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AetherAssetOutputSettings, self).__init__(**kwargs)
self.path = path
self.path_parameter_assignment = path_parameter_assignment
self.type = type
self.options = options
self.data_store_mode = data_store_mode
self.name = name
self.version = version
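# Note: in _attribute_map above, path_parameter_assignment serializes to the
# wire key 'PathParameterAssignment' (Pascal case) while its neighbours use
# camelCase; this mirrors the generated contract and should not be normalized
# by hand, since regeneration would undo the change.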
class AetherAutoFeaturizeConfiguration(msrest.serialization.Model):
"""AetherAutoFeaturizeConfiguration.
:ivar featurization_config:
:vartype featurization_config: ~flow.models.AetherFeaturizationSettings
"""
_attribute_map = {
'featurization_config': {'key': 'featurizationConfig', 'type': 'AetherFeaturizationSettings'},
}
def __init__(
self,
*,
featurization_config: Optional["AetherFeaturizationSettings"] = None,
**kwargs
):
"""
:keyword featurization_config:
:paramtype featurization_config: ~flow.models.AetherFeaturizationSettings
"""
super(AetherAutoFeaturizeConfiguration, self).__init__(**kwargs)
self.featurization_config = featurization_config
class AetherAutoMLComponentConfiguration(msrest.serialization.Model):
"""AetherAutoMLComponentConfiguration.
:ivar auto_train_config:
:vartype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
:ivar auto_featurize_config:
:vartype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
"""
_attribute_map = {
'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AetherAutoTrainConfiguration'},
'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AetherAutoFeaturizeConfiguration'},
}
def __init__(
self,
*,
auto_train_config: Optional["AetherAutoTrainConfiguration"] = None,
auto_featurize_config: Optional["AetherAutoFeaturizeConfiguration"] = None,
**kwargs
):
"""
:keyword auto_train_config:
:paramtype auto_train_config: ~flow.models.AetherAutoTrainConfiguration
:keyword auto_featurize_config:
:paramtype auto_featurize_config: ~flow.models.AetherAutoFeaturizeConfiguration
"""
super(AetherAutoMLComponentConfiguration, self).__init__(**kwargs)
self.auto_train_config = auto_train_config
self.auto_featurize_config = auto_featurize_config
class AetherAutoTrainConfiguration(msrest.serialization.Model):
"""AetherAutoTrainConfiguration.
:ivar general_settings:
:vartype general_settings: ~flow.models.AetherGeneralSettings
:ivar limit_settings:
:vartype limit_settings: ~flow.models.AetherLimitSettings
:ivar data_settings:
:vartype data_settings: ~flow.models.AetherDataSettings
:ivar forecasting_settings:
:vartype forecasting_settings: ~flow.models.AetherForecastingSettings
:ivar training_settings:
:vartype training_settings: ~flow.models.AetherTrainingSettings
:ivar sweep_settings:
:vartype sweep_settings: ~flow.models.AetherSweepSettings
:ivar image_model_settings: Dictionary of :code:`<any>`.
:vartype image_model_settings: dict[str, any]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar compute_configuration:
:vartype compute_configuration: ~flow.models.AetherComputeConfiguration
:ivar resource_configurtion:
:vartype resource_configurtion: ~flow.models.AetherResourceConfiguration
:ivar environment_id:
:vartype environment_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
"""
_attribute_map = {
'general_settings': {'key': 'generalSettings', 'type': 'AetherGeneralSettings'},
'limit_settings': {'key': 'limitSettings', 'type': 'AetherLimitSettings'},
'data_settings': {'key': 'dataSettings', 'type': 'AetherDataSettings'},
'forecasting_settings': {'key': 'forecastingSettings', 'type': 'AetherForecastingSettings'},
'training_settings': {'key': 'trainingSettings', 'type': 'AetherTrainingSettings'},
'sweep_settings': {'key': 'sweepSettings', 'type': 'AetherSweepSettings'},
'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
'properties': {'key': 'properties', 'type': '{str}'},
'compute_configuration': {'key': 'computeConfiguration', 'type': 'AetherComputeConfiguration'},
'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AetherResourceConfiguration'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
}
def __init__(
self,
*,
general_settings: Optional["AetherGeneralSettings"] = None,
limit_settings: Optional["AetherLimitSettings"] = None,
data_settings: Optional["AetherDataSettings"] = None,
forecasting_settings: Optional["AetherForecastingSettings"] = None,
training_settings: Optional["AetherTrainingSettings"] = None,
sweep_settings: Optional["AetherSweepSettings"] = None,
image_model_settings: Optional[Dict[str, Any]] = None,
properties: Optional[Dict[str, str]] = None,
compute_configuration: Optional["AetherComputeConfiguration"] = None,
resource_configurtion: Optional["AetherResourceConfiguration"] = None,
environment_id: Optional[str] = None,
environment_variables: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword general_settings:
:paramtype general_settings: ~flow.models.AetherGeneralSettings
:keyword limit_settings:
:paramtype limit_settings: ~flow.models.AetherLimitSettings
:keyword data_settings:
:paramtype data_settings: ~flow.models.AetherDataSettings
:keyword forecasting_settings:
:paramtype forecasting_settings: ~flow.models.AetherForecastingSettings
:keyword training_settings:
:paramtype training_settings: ~flow.models.AetherTrainingSettings
:keyword sweep_settings:
:paramtype sweep_settings: ~flow.models.AetherSweepSettings
:keyword image_model_settings: Dictionary of :code:`<any>`.
:paramtype image_model_settings: dict[str, any]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword compute_configuration:
:paramtype compute_configuration: ~flow.models.AetherComputeConfiguration
:keyword resource_configurtion:
:paramtype resource_configurtion: ~flow.models.AetherResourceConfiguration
:keyword environment_id:
:paramtype environment_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
"""
super(AetherAutoTrainConfiguration, self).__init__(**kwargs)
self.general_settings = general_settings
self.limit_settings = limit_settings
self.data_settings = data_settings
self.forecasting_settings = forecasting_settings
self.training_settings = training_settings
self.sweep_settings = sweep_settings
self.image_model_settings = image_model_settings
self.properties = properties
self.compute_configuration = compute_configuration
self.resource_configurtion = resource_configurtion
self.environment_id = environment_id
self.environment_variables = environment_variables
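# Note: resource_configurtion (sic) is misspelled in the generated contract and
# matches the wire key 'resourceConfigurtion' in _attribute_map above. Because
# AutoRest regenerates this file and callers bind to the attribute name, the
# spelling is intentionally left as generated.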
class AetherAzureBlobReference(msrest.serialization.Model):
"""AetherAzureBlobReference.
:ivar container:
:vartype container: str
:ivar sas_token:
:vartype sas_token: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'container': {'key': 'container', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
container: Optional[str] = None,
sas_token: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword container:
:paramtype container: str
:keyword sas_token:
:paramtype sas_token: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureBlobReference, self).__init__(**kwargs)
self.container = container
self.sas_token = sas_token
self.uri = uri
self.account = account
self.relative_path = relative_path
self.path_type = path_type
self.aml_data_store_name = aml_data_store_name
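# Illustrative sketch: path_type accepts either an AetherFileBasedPathType enum
# member or its string value; all other values below are hypothetical
# placeholders.
#
#     ref = AetherAzureBlobReference(
#         container="data",
#         account="mystorageaccount",
#         relative_path="datasets/train.csv",
#         path_type="File",  # or AetherFileBasedPathType member
#     )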
class AetherAzureDatabaseReference(msrest.serialization.Model):
"""AetherAzureDatabaseReference.
:ivar server_uri:
:vartype server_uri: str
:ivar database_name:
:vartype database_name: str
:ivar table_name:
:vartype table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'server_uri': {'key': 'serverUri', 'type': 'str'},
'database_name': {'key': 'databaseName', 'type': 'str'},
'table_name': {'key': 'tableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
server_uri: Optional[str] = None,
database_name: Optional[str] = None,
table_name: Optional[str] = None,
sql_query: Optional[str] = None,
stored_procedure_name: Optional[str] = None,
stored_procedure_parameters: Optional[List["AetherStoredProcedureParameter"]] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword server_uri:
:paramtype server_uri: str
:keyword database_name:
:paramtype database_name: str
:keyword table_name:
:paramtype table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDatabaseReference, self).__init__(**kwargs)
self.server_uri = server_uri
self.database_name = database_name
self.table_name = table_name
self.sql_query = sql_query
self.stored_procedure_name = stored_procedure_name
self.stored_procedure_parameters = stored_procedure_parameters
self.aml_data_store_name = aml_data_store_name
class AetherAzureDataLakeGen2Reference(msrest.serialization.Model):
"""AetherAzureDataLakeGen2Reference.
:ivar file_system_name:
:vartype file_system_name: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
file_system_name: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword file_system_name:
:paramtype file_system_name: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDataLakeGen2Reference, self).__init__(**kwargs)
self.file_system_name = file_system_name
self.uri = uri
self.account = account
self.relative_path = relative_path
self.path_type = path_type
self.aml_data_store_name = aml_data_store_name
class AetherAzureDataLakeReference(msrest.serialization.Model):
"""AetherAzureDataLakeReference.
:ivar tenant:
:vartype tenant: str
:ivar subscription:
:vartype subscription: str
:ivar resource_group:
:vartype resource_group: str
:ivar data_lake_uri:
:vartype data_lake_uri: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'tenant': {'key': 'tenant', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'data_lake_uri': {'key': 'dataLakeUri', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
tenant: Optional[str] = None,
subscription: Optional[str] = None,
resource_group: Optional[str] = None,
data_lake_uri: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword tenant:
:paramtype tenant: str
:keyword subscription:
:paramtype subscription: str
:keyword resource_group:
:paramtype resource_group: str
:keyword data_lake_uri:
:paramtype data_lake_uri: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureDataLakeReference, self).__init__(**kwargs)
self.tenant = tenant
self.subscription = subscription
self.resource_group = resource_group
self.data_lake_uri = data_lake_uri
self.uri = uri
self.account = account
self.relative_path = relative_path
self.path_type = path_type
self.aml_data_store_name = aml_data_store_name
class AetherAzureFilesReference(msrest.serialization.Model):
"""AetherAzureFilesReference.
:ivar share:
:vartype share: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar path_type: Possible values include: "Unknown", "File", "Folder".
:vartype path_type: str or ~flow.models.AetherFileBasedPathType
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'share': {'key': 'share', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'path_type': {'key': 'pathType', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
share: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
path_type: Optional[Union[str, "AetherFileBasedPathType"]] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword share:
:paramtype share: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword path_type: Possible values include: "Unknown", "File", "Folder".
:paramtype path_type: str or ~flow.models.AetherFileBasedPathType
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherAzureFilesReference, self).__init__(**kwargs)
self.share = share
self.uri = uri
self.account = account
self.relative_path = relative_path
self.path_type = path_type
self.aml_data_store_name = aml_data_store_name
class AetherBatchAiComputeInfo(msrest.serialization.Model):
"""AetherBatchAiComputeInfo.
:ivar batch_ai_subscription_id:
:vartype batch_ai_subscription_id: str
:ivar batch_ai_resource_group:
:vartype batch_ai_resource_group: str
:ivar batch_ai_workspace_name:
:vartype batch_ai_workspace_name: str
:ivar cluster_name:
:vartype cluster_name: str
:ivar native_shared_directory:
:vartype native_shared_directory: str
"""
_attribute_map = {
'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
'cluster_name': {'key': 'clusterName', 'type': 'str'},
'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
}
def __init__(
self,
*,
batch_ai_subscription_id: Optional[str] = None,
batch_ai_resource_group: Optional[str] = None,
batch_ai_workspace_name: Optional[str] = None,
cluster_name: Optional[str] = None,
native_shared_directory: Optional[str] = None,
**kwargs
):
"""
:keyword batch_ai_subscription_id:
:paramtype batch_ai_subscription_id: str
:keyword batch_ai_resource_group:
:paramtype batch_ai_resource_group: str
:keyword batch_ai_workspace_name:
:paramtype batch_ai_workspace_name: str
:keyword cluster_name:
:paramtype cluster_name: str
:keyword native_shared_directory:
:paramtype native_shared_directory: str
"""
super(AetherBatchAiComputeInfo, self).__init__(**kwargs)
self.batch_ai_subscription_id = batch_ai_subscription_id
self.batch_ai_resource_group = batch_ai_resource_group
self.batch_ai_workspace_name = batch_ai_workspace_name
self.cluster_name = cluster_name
self.native_shared_directory = native_shared_directory
class AetherBuildArtifactInfo(msrest.serialization.Model):
"""AetherBuildArtifactInfo.
:ivar type: Possible values include: "CloudBuild", "Vso", "VsoGit".
:vartype type: str or ~flow.models.AetherBuildSourceType
:ivar cloud_build_drop_path_info:
:vartype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
:ivar vso_build_artifact_info:
:vartype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'cloud_build_drop_path_info': {'key': 'cloudBuildDropPathInfo', 'type': 'AetherCloudBuildDropPathInfo'},
'vso_build_artifact_info': {'key': 'vsoBuildArtifactInfo', 'type': 'AetherVsoBuildArtifactInfo'},
}
def __init__(
self,
*,
type: Optional[Union[str, "AetherBuildSourceType"]] = None,
cloud_build_drop_path_info: Optional["AetherCloudBuildDropPathInfo"] = None,
vso_build_artifact_info: Optional["AetherVsoBuildArtifactInfo"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "CloudBuild", "Vso", "VsoGit".
:paramtype type: str or ~flow.models.AetherBuildSourceType
:keyword cloud_build_drop_path_info:
:paramtype cloud_build_drop_path_info: ~flow.models.AetherCloudBuildDropPathInfo
:keyword vso_build_artifact_info:
:paramtype vso_build_artifact_info: ~flow.models.AetherVsoBuildArtifactInfo
"""
super(AetherBuildArtifactInfo, self).__init__(**kwargs)
self.type = type
self.cloud_build_drop_path_info = cloud_build_drop_path_info
self.vso_build_artifact_info = vso_build_artifact_info
class AetherCloudBuildDropPathInfo(msrest.serialization.Model):
"""AetherCloudBuildDropPathInfo.
:ivar build_info:
:vartype build_info: ~flow.models.AetherCloudBuildInfo
:ivar root:
:vartype root: str
"""
_attribute_map = {
'build_info': {'key': 'buildInfo', 'type': 'AetherCloudBuildInfo'},
'root': {'key': 'root', 'type': 'str'},
}
def __init__(
self,
*,
build_info: Optional["AetherCloudBuildInfo"] = None,
root: Optional[str] = None,
**kwargs
):
"""
:keyword build_info:
:paramtype build_info: ~flow.models.AetherCloudBuildInfo
:keyword root:
:paramtype root: str
"""
super(AetherCloudBuildDropPathInfo, self).__init__(**kwargs)
self.build_info = build_info
self.root = root
class AetherCloudBuildInfo(msrest.serialization.Model):
"""AetherCloudBuildInfo.
:ivar queue_info:
:vartype queue_info: ~flow.models.AetherCloudBuildQueueInfo
:ivar build_id:
:vartype build_id: str
:ivar drop_url:
:vartype drop_url: str
"""
_attribute_map = {
'queue_info': {'key': 'queueInfo', 'type': 'AetherCloudBuildQueueInfo'},
'build_id': {'key': 'buildId', 'type': 'str'},
'drop_url': {'key': 'dropUrl', 'type': 'str'},
}
def __init__(
self,
*,
queue_info: Optional["AetherCloudBuildQueueInfo"] = None,
build_id: Optional[str] = None,
drop_url: Optional[str] = None,
**kwargs
):
"""
:keyword queue_info:
:paramtype queue_info: ~flow.models.AetherCloudBuildQueueInfo
:keyword build_id:
:paramtype build_id: str
:keyword drop_url:
:paramtype drop_url: str
"""
super(AetherCloudBuildInfo, self).__init__(**kwargs)
self.queue_info = queue_info
self.build_id = build_id
self.drop_url = drop_url
class AetherCloudBuildQueueInfo(msrest.serialization.Model):
"""AetherCloudBuildQueueInfo.
:ivar build_queue:
:vartype build_queue: str
:ivar build_role:
:vartype build_role: str
"""
_attribute_map = {
'build_queue': {'key': 'buildQueue', 'type': 'str'},
'build_role': {'key': 'buildRole', 'type': 'str'},
}
def __init__(
self,
*,
build_queue: Optional[str] = None,
build_role: Optional[str] = None,
**kwargs
):
"""
:keyword build_queue:
:paramtype build_queue: str
:keyword build_role:
:paramtype build_role: str
"""
super(AetherCloudBuildQueueInfo, self).__init__(**kwargs)
self.build_queue = build_queue
self.build_role = build_role
class AetherCloudPrioritySetting(msrest.serialization.Model):
"""AetherCloudPrioritySetting.
:ivar scope_priority:
:vartype scope_priority: ~flow.models.AetherPriorityConfiguration
:ivar aml_compute_priority:
:vartype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
:ivar itp_priority:
:vartype itp_priority: ~flow.models.AetherPriorityConfiguration
:ivar singularity_priority:
:vartype singularity_priority: ~flow.models.AetherPriorityConfiguration
"""
_attribute_map = {
'scope_priority': {'key': 'scopePriority', 'type': 'AetherPriorityConfiguration'},
'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'AetherPriorityConfiguration'},
'itp_priority': {'key': 'ItpPriority', 'type': 'AetherPriorityConfiguration'},
'singularity_priority': {'key': 'SingularityPriority', 'type': 'AetherPriorityConfiguration'},
}
def __init__(
self,
*,
scope_priority: Optional["AetherPriorityConfiguration"] = None,
aml_compute_priority: Optional["AetherPriorityConfiguration"] = None,
itp_priority: Optional["AetherPriorityConfiguration"] = None,
singularity_priority: Optional["AetherPriorityConfiguration"] = None,
**kwargs
):
"""
:keyword scope_priority:
:paramtype scope_priority: ~flow.models.AetherPriorityConfiguration
:keyword aml_compute_priority:
:paramtype aml_compute_priority: ~flow.models.AetherPriorityConfiguration
:keyword itp_priority:
:paramtype itp_priority: ~flow.models.AetherPriorityConfiguration
:keyword singularity_priority:
:paramtype singularity_priority: ~flow.models.AetherPriorityConfiguration
"""
super(AetherCloudPrioritySetting, self).__init__(**kwargs)
self.scope_priority = scope_priority
self.aml_compute_priority = aml_compute_priority
self.itp_priority = itp_priority
self.singularity_priority = singularity_priority
class AetherCloudSettings(msrest.serialization.Model):
"""AetherCloudSettings.
:ivar linked_settings:
:vartype linked_settings: list[~flow.models.AetherParameterAssignment]
:ivar priority_config:
:vartype priority_config: ~flow.models.AetherPriorityConfiguration
:ivar hdi_run_config:
:vartype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
:ivar sub_graph_config:
:vartype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
:ivar auto_ml_component_config:
:vartype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
:ivar ap_cloud_config:
:vartype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
:ivar scope_cloud_config:
:vartype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
:ivar es_cloud_config:
:vartype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
:ivar data_transfer_cloud_config:
:vartype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
:ivar aml_spark_cloud_setting:
:vartype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
:ivar data_transfer_v2_cloud_setting:
:vartype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
"""
_attribute_map = {
'linked_settings': {'key': 'linkedSettings', 'type': '[AetherParameterAssignment]'},
'priority_config': {'key': 'priorityConfig', 'type': 'AetherPriorityConfiguration'},
'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'AetherHdiRunConfiguration'},
'sub_graph_config': {'key': 'subGraphConfig', 'type': 'AetherSubGraphConfiguration'},
'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AetherAutoMLComponentConfiguration'},
'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'AetherAPCloudConfiguration'},
'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'AetherScopeCloudConfiguration'},
'es_cloud_config': {'key': 'esCloudConfig', 'type': 'AetherEsCloudConfiguration'},
'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'AetherDataTransferCloudConfiguration'},
'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AetherAmlSparkCloudSetting'},
'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'AetherDataTransferV2CloudSetting'},
}
def __init__(
self,
*,
linked_settings: Optional[List["AetherParameterAssignment"]] = None,
priority_config: Optional["AetherPriorityConfiguration"] = None,
hdi_run_config: Optional["AetherHdiRunConfiguration"] = None,
sub_graph_config: Optional["AetherSubGraphConfiguration"] = None,
auto_ml_component_config: Optional["AetherAutoMLComponentConfiguration"] = None,
ap_cloud_config: Optional["AetherAPCloudConfiguration"] = None,
scope_cloud_config: Optional["AetherScopeCloudConfiguration"] = None,
es_cloud_config: Optional["AetherEsCloudConfiguration"] = None,
data_transfer_cloud_config: Optional["AetherDataTransferCloudConfiguration"] = None,
aml_spark_cloud_setting: Optional["AetherAmlSparkCloudSetting"] = None,
data_transfer_v2_cloud_setting: Optional["AetherDataTransferV2CloudSetting"] = None,
**kwargs
):
"""
:keyword linked_settings:
:paramtype linked_settings: list[~flow.models.AetherParameterAssignment]
:keyword priority_config:
:paramtype priority_config: ~flow.models.AetherPriorityConfiguration
:keyword hdi_run_config:
:paramtype hdi_run_config: ~flow.models.AetherHdiRunConfiguration
:keyword sub_graph_config:
:paramtype sub_graph_config: ~flow.models.AetherSubGraphConfiguration
:keyword auto_ml_component_config:
:paramtype auto_ml_component_config: ~flow.models.AetherAutoMLComponentConfiguration
:keyword ap_cloud_config:
:paramtype ap_cloud_config: ~flow.models.AetherAPCloudConfiguration
:keyword scope_cloud_config:
:paramtype scope_cloud_config: ~flow.models.AetherScopeCloudConfiguration
:keyword es_cloud_config:
:paramtype es_cloud_config: ~flow.models.AetherEsCloudConfiguration
:keyword data_transfer_cloud_config:
:paramtype data_transfer_cloud_config: ~flow.models.AetherDataTransferCloudConfiguration
:keyword aml_spark_cloud_setting:
:paramtype aml_spark_cloud_setting: ~flow.models.AetherAmlSparkCloudSetting
:keyword data_transfer_v2_cloud_setting:
:paramtype data_transfer_v2_cloud_setting: ~flow.models.AetherDataTransferV2CloudSetting
"""
super(AetherCloudSettings, self).__init__(**kwargs)
self.linked_settings = linked_settings
self.priority_config = priority_config
self.hdi_run_config = hdi_run_config
self.sub_graph_config = sub_graph_config
self.auto_ml_component_config = auto_ml_component_config
self.ap_cloud_config = ap_cloud_config
self.scope_cloud_config = scope_cloud_config
self.es_cloud_config = es_cloud_config
self.data_transfer_cloud_config = data_transfer_cloud_config
self.aml_spark_cloud_setting = aml_spark_cloud_setting
self.data_transfer_v2_cloud_setting = data_transfer_v2_cloud_setting
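
# Editor's sketch (not part of the generated surface): wiring a data-transfer-v2
# setting into AetherCloudSettings. All literal values are illustrative
# assumptions; only the keyword names come from the models in this module.
#
#     settings = AetherCloudSettings(
#         data_transfer_v2_cloud_setting=AetherDataTransferV2CloudSetting(
#             task_type="CopyData",                 # one of the documented values
#             compute_name="serverless",            # assumed compute name
#             data_copy_mode="MergeWithOverwrite",  # one of the documented values
#         ),
#     )
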
class AetherColumnTransformer(msrest.serialization.Model):
"""AetherColumnTransformer.
:ivar fields:
:vartype fields: list[str]
:ivar parameters: Anything.
:vartype parameters: any
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '[str]'},
'parameters': {'key': 'parameters', 'type': 'object'},
}
def __init__(
self,
*,
fields: Optional[List[str]] = None,
parameters: Optional[Any] = None,
**kwargs
):
"""
:keyword fields:
:paramtype fields: list[str]
:keyword parameters: Anything.
:paramtype parameters: any
"""
super(AetherColumnTransformer, self).__init__(**kwargs)
self.fields = fields
self.parameters = parameters
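
# Editor's sketch: a column transformer entry. Field names and the parameters
# payload are illustrative assumptions; ``parameters`` accepts any JSON value.
#
#     transformer = AetherColumnTransformer(
#         fields=["age", "income"],          # assumed column names
#         parameters={"strategy": "median"}, # arbitrary transformer options
#     )
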
class AetherComputeConfiguration(msrest.serialization.Model):
"""AetherComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar is_preemptable:
:vartype is_preemptable: bool
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
}
def __init__(
self,
*,
target: Optional[str] = None,
instance_count: Optional[int] = None,
is_local: Optional[bool] = None,
location: Optional[str] = None,
is_clusterless: Optional[bool] = None,
instance_type: Optional[str] = None,
properties: Optional[Dict[str, Any]] = None,
is_preemptable: Optional[bool] = None,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword is_preemptable:
:paramtype is_preemptable: bool
"""
super(AetherComputeConfiguration, self).__init__(**kwargs)
self.target = target
self.instance_count = instance_count
self.is_local = is_local
self.location = location
self.is_clusterless = is_clusterless
self.instance_type = instance_type
self.properties = properties
self.is_preemptable = is_preemptable
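
# Editor's sketch: a remote, two-instance compute configuration. The target and
# instance type are illustrative assumptions.
#
#     compute = AetherComputeConfiguration(
#         target="cpu-cluster",             # assumed compute target name
#         instance_count=2,
#         is_local=False,
#         instance_type="STANDARD_DS3_V2",  # assumed VM size
#     )
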
class AetherComputeSetting(msrest.serialization.Model):
"""AetherComputeSetting.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:vartype compute_type: str or ~flow.models.AetherComputeType
:ivar batch_ai_compute_info:
:vartype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
:ivar remote_docker_compute_info:
:vartype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
:ivar hdi_cluster_compute_info:
:vartype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
:ivar mlc_compute_info:
:vartype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
:ivar databricks_compute_info:
:vartype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'AetherBatchAiComputeInfo'},
'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'AetherRemoteDockerComputeInfo'},
'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'AetherHdiClusterComputeInfo'},
'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'AetherMlcComputeInfo'},
'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'AetherDatabricksComputeInfo'},
}
def __init__(
self,
*,
name: Optional[str] = None,
compute_type: Optional[Union[str, "AetherComputeType"]] = None,
batch_ai_compute_info: Optional["AetherBatchAiComputeInfo"] = None,
remote_docker_compute_info: Optional["AetherRemoteDockerComputeInfo"] = None,
hdi_cluster_compute_info: Optional["AetherHdiClusterComputeInfo"] = None,
mlc_compute_info: Optional["AetherMlcComputeInfo"] = None,
databricks_compute_info: Optional["AetherDatabricksComputeInfo"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:paramtype compute_type: str or ~flow.models.AetherComputeType
:keyword batch_ai_compute_info:
:paramtype batch_ai_compute_info: ~flow.models.AetherBatchAiComputeInfo
:keyword remote_docker_compute_info:
:paramtype remote_docker_compute_info: ~flow.models.AetherRemoteDockerComputeInfo
:keyword hdi_cluster_compute_info:
:paramtype hdi_cluster_compute_info: ~flow.models.AetherHdiClusterComputeInfo
:keyword mlc_compute_info:
:paramtype mlc_compute_info: ~flow.models.AetherMlcComputeInfo
:keyword databricks_compute_info:
:paramtype databricks_compute_info: ~flow.models.AetherDatabricksComputeInfo
"""
super(AetherComputeSetting, self).__init__(**kwargs)
self.name = name
self.compute_type = compute_type
self.batch_ai_compute_info = batch_ai_compute_info
self.remote_docker_compute_info = remote_docker_compute_info
self.hdi_cluster_compute_info = hdi_cluster_compute_info
self.mlc_compute_info = mlc_compute_info
self.databricks_compute_info = databricks_compute_info
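
# Editor's sketch: a Databricks-backed compute setting, pairing the
# ``compute_type`` enum value with its matching info model. The cluster id is an
# illustrative assumption.
#
#     setting = AetherComputeSetting(
#         name="my-databricks",       # assumed setting name
#         compute_type="Databricks",  # one of the documented values
#         databricks_compute_info=AetherDatabricksComputeInfo(
#             existing_cluster_id="0000-000000-abcdefgh",  # assumed cluster id
#         ),
#     )
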
class AetherControlInput(msrest.serialization.Model):
"""AetherControlInput.
:ivar name:
:vartype name: str
:ivar default_value: Possible values include: "None", "False", "True", "Skipped".
:vartype default_value: str or ~flow.models.AetherControlInputValue
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
default_value: Optional[Union[str, "AetherControlInputValue"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword default_value: Possible values include: "None", "False", "True", "Skipped".
:paramtype default_value: str or ~flow.models.AetherControlInputValue
"""
super(AetherControlInput, self).__init__(**kwargs)
self.name = name
self.default_value = default_value
class AetherControlOutput(msrest.serialization.Model):
"""AetherControlOutput.
:ivar name:
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
"""
super(AetherControlOutput, self).__init__(**kwargs)
self.name = name
class AetherCopyDataTask(msrest.serialization.Model):
"""AetherCopyDataTask.
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
_attribute_map = {
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
*,
data_copy_mode: Optional[Union[str, "AetherDataCopyMode"]] = None,
**kwargs
):
"""
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
super(AetherCopyDataTask, self).__init__(**kwargs)
self.data_copy_mode = data_copy_mode
class AetherCosmosReference(msrest.serialization.Model):
"""AetherCosmosReference.
:ivar cluster:
:vartype cluster: str
:ivar vc:
:vartype vc: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'str'},
'vc': {'key': 'vc', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
cluster: Optional[str] = None,
vc: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword cluster:
:paramtype cluster: str
:keyword vc:
:paramtype vc: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherCosmosReference, self).__init__(**kwargs)
self.cluster = cluster
self.vc = vc
self.relative_path = relative_path
class AetherCreatedBy(msrest.serialization.Model):
"""AetherCreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
:ivar puid:
:vartype puid: str
:ivar iss:
:vartype iss: str
:ivar idp:
:vartype idp: str
:ivar altsec_id:
:vartype altsec_id: str
:ivar source_ip:
:vartype source_ip: str
:ivar skip_registry_private_link_check:
:vartype skip_registry_private_link_check: bool
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'puid': {'key': 'puid', 'type': 'str'},
'iss': {'key': 'iss', 'type': 'str'},
'idp': {'key': 'idp', 'type': 'str'},
'altsec_id': {'key': 'altsecId', 'type': 'str'},
'source_ip': {'key': 'sourceIp', 'type': 'str'},
'skip_registry_private_link_check': {'key': 'skipRegistryPrivateLinkCheck', 'type': 'bool'},
}
def __init__(
self,
*,
user_object_id: Optional[str] = None,
user_tenant_id: Optional[str] = None,
user_name: Optional[str] = None,
puid: Optional[str] = None,
iss: Optional[str] = None,
idp: Optional[str] = None,
altsec_id: Optional[str] = None,
source_ip: Optional[str] = None,
skip_registry_private_link_check: Optional[bool] = None,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
:keyword puid:
:paramtype puid: str
:keyword iss:
:paramtype iss: str
:keyword idp:
:paramtype idp: str
:keyword altsec_id:
:paramtype altsec_id: str
:keyword source_ip:
:paramtype source_ip: str
:keyword skip_registry_private_link_check:
:paramtype skip_registry_private_link_check: bool
"""
super(AetherCreatedBy, self).__init__(**kwargs)
self.user_object_id = user_object_id
self.user_tenant_id = user_tenant_id
self.user_name = user_name
self.puid = puid
self.iss = iss
self.idp = idp
self.altsec_id = altsec_id
self.source_ip = source_ip
self.skip_registry_private_link_check = skip_registry_private_link_check
class AetherCustomReference(msrest.serialization.Model):
"""AetherCustomReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
aml_data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherCustomReference, self).__init__(**kwargs)
self.aml_data_store_name = aml_data_store_name
self.relative_path = relative_path
class AetherDatabaseSink(msrest.serialization.Model):
"""AetherDatabaseSink.
:ivar connection:
:vartype connection: str
:ivar table:
:vartype table: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'table': {'key': 'table', 'type': 'str'},
}
def __init__(
self,
*,
connection: Optional[str] = None,
table: Optional[str] = None,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword table:
:paramtype table: str
"""
super(AetherDatabaseSink, self).__init__(**kwargs)
self.connection = connection
self.table = table
class AetherDatabaseSource(msrest.serialization.Model):
"""AetherDatabaseSource.
:ivar connection:
:vartype connection: str
:ivar query:
:vartype query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'query': {'key': 'query', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[AetherStoredProcedureParameter]'},
}
def __init__(
self,
*,
connection: Optional[str] = None,
query: Optional[str] = None,
stored_procedure_name: Optional[str] = None,
stored_procedure_parameters: Optional[List["AetherStoredProcedureParameter"]] = None,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword query:
:paramtype query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.AetherStoredProcedureParameter]
"""
super(AetherDatabaseSource, self).__init__(**kwargs)
self.connection = connection
self.query = query
self.stored_procedure_name = stored_procedure_name
self.stored_procedure_parameters = stored_procedure_parameters
class AetherDatabricksComputeInfo(msrest.serialization.Model):
"""AetherDatabricksComputeInfo.
:ivar existing_cluster_id:
:vartype existing_cluster_id: str
"""
_attribute_map = {
'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
}
def __init__(
self,
*,
existing_cluster_id: Optional[str] = None,
**kwargs
):
"""
:keyword existing_cluster_id:
:paramtype existing_cluster_id: str
"""
super(AetherDatabricksComputeInfo, self).__init__(**kwargs)
self.existing_cluster_id = existing_cluster_id
class AetherDataLocation(msrest.serialization.Model):
"""AetherDataLocation.
:ivar storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:vartype storage_type: str or ~flow.models.AetherDataLocationStorageType
:ivar storage_id:
:vartype storage_id: str
:ivar uri:
:vartype uri: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference:
:vartype data_reference: ~flow.models.AetherDataReference
:ivar aml_dataset:
:vartype aml_dataset: ~flow.models.AetherAmlDataset
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AetherAssetDefinition
:ivar is_compliant:
:vartype is_compliant: bool
:ivar reuse_calculation_fields:
:vartype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields
"""
_attribute_map = {
'storage_type': {'key': 'storageType', 'type': 'str'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference': {'key': 'dataReference', 'type': 'AetherDataReference'},
'aml_dataset': {'key': 'amlDataset', 'type': 'AetherAmlDataset'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'},
'is_compliant': {'key': 'isCompliant', 'type': 'bool'},
'reuse_calculation_fields': {'key': 'reuseCalculationFields', 'type': 'AetherDataLocationReuseCalculationFields'},
}
def __init__(
self,
*,
storage_type: Optional[Union[str, "AetherDataLocationStorageType"]] = None,
storage_id: Optional[str] = None,
uri: Optional[str] = None,
data_store_name: Optional[str] = None,
data_reference: Optional["AetherDataReference"] = None,
aml_dataset: Optional["AetherAmlDataset"] = None,
asset_definition: Optional["AetherAssetDefinition"] = None,
is_compliant: Optional[bool] = None,
reuse_calculation_fields: Optional["AetherDataLocationReuseCalculationFields"] = None,
**kwargs
):
"""
:keyword storage_type: Possible values include: "Cosmos", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:paramtype storage_type: str or ~flow.models.AetherDataLocationStorageType
:keyword storage_id:
:paramtype storage_id: str
:keyword uri:
:paramtype uri: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference:
:paramtype data_reference: ~flow.models.AetherDataReference
:keyword aml_dataset:
:paramtype aml_dataset: ~flow.models.AetherAmlDataset
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AetherAssetDefinition
:keyword is_compliant:
:paramtype is_compliant: bool
:keyword reuse_calculation_fields:
:paramtype reuse_calculation_fields: ~flow.models.AetherDataLocationReuseCalculationFields
"""
super(AetherDataLocation, self).__init__(**kwargs)
self.storage_type = storage_type
self.storage_id = storage_id
self.uri = uri
self.data_store_name = data_store_name
self.data_reference = data_reference
self.aml_dataset = aml_dataset
self.asset_definition = asset_definition
self.is_compliant = is_compliant
self.reuse_calculation_fields = reuse_calculation_fields
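
# Editor's sketch: an AzureBlob-typed data location addressed by datastore name
# and URI. Both values are illustrative assumptions.
#
#     location = AetherDataLocation(
#         storage_type="AzureBlob",  # one of the documented values
#         data_store_name="workspaceblobstore",
#         uri="azureml://datastores/workspaceblobstore/paths/data/",  # assumed URI
#     )
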
class AetherDataLocationReuseCalculationFields(msrest.serialization.Model):
"""AetherDataLocationReuseCalculationFields.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar data_experiment_id:
:vartype data_experiment_id: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'data_experiment_id': {'key': 'dataExperimentId', 'type': 'str'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
data_experiment_id: Optional[str] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword data_experiment_id:
:paramtype data_experiment_id: str
"""
super(AetherDataLocationReuseCalculationFields, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.relative_path = relative_path
self.data_experiment_id = data_experiment_id
class AetherDataPath(msrest.serialization.Model):
"""AetherDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar sql_data_path:
:vartype sql_data_path: ~flow.models.AetherSqlDataPath
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'sql_data_path': {'key': 'sqlDataPath', 'type': 'AetherSqlDataPath'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
sql_data_path: Optional["AetherSqlDataPath"] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword sql_data_path:
:paramtype sql_data_path: ~flow.models.AetherSqlDataPath
"""
super(AetherDataPath, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.relative_path = relative_path
self.sql_data_path = sql_data_path
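
# Editor's sketch: a datastore-relative data path. Datastore and path are
# illustrative assumptions.
#
#     path = AetherDataPath(
#         data_store_name="workspaceblobstore",
#         relative_path="data/train.csv",
#     )
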
class AetherDataReference(msrest.serialization.Model):
"""AetherDataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2",
"DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
:vartype type: str or ~flow.models.AetherDataReferenceType
:ivar azure_blob_reference:
:vartype azure_blob_reference: ~flow.models.AetherAzureBlobReference
:ivar azure_data_lake_reference:
:vartype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference
:ivar azure_files_reference:
:vartype azure_files_reference: ~flow.models.AetherAzureFilesReference
:ivar cosmos_reference:
:vartype cosmos_reference: ~flow.models.AetherCosmosReference
:ivar philly_hdfs_reference:
:vartype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference
:ivar azure_sql_database_reference:
:vartype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar azure_postgres_database_reference:
:vartype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar azure_data_lake_gen2_reference:
:vartype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference
:ivar dbfs_reference:
:vartype dbfs_reference: ~flow.models.AetherDBFSReference
:ivar azure_my_sql_database_reference:
:vartype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:ivar custom_reference:
:vartype custom_reference: ~flow.models.AetherCustomReference
:ivar hdfs_reference:
:vartype hdfs_reference: ~flow.models.AetherHdfsReference
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AetherAzureBlobReference'},
'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AetherAzureDataLakeReference'},
'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AetherAzureFilesReference'},
'cosmos_reference': {'key': 'cosmosReference', 'type': 'AetherCosmosReference'},
'philly_hdfs_reference': {'key': 'phillyHdfsReference', 'type': 'AetherPhillyHdfsReference'},
'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AetherAzureDataLakeGen2Reference'},
'dbfs_reference': {'key': 'dbfsReference', 'type': 'AetherDBFSReference'},
'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AetherAzureDatabaseReference'},
'custom_reference': {'key': 'customReference', 'type': 'AetherCustomReference'},
'hdfs_reference': {'key': 'hdfsReference', 'type': 'AetherHdfsReference'},
}
def __init__(
self,
*,
type: Optional[Union[str, "AetherDataReferenceType"]] = None,
azure_blob_reference: Optional["AetherAzureBlobReference"] = None,
azure_data_lake_reference: Optional["AetherAzureDataLakeReference"] = None,
azure_files_reference: Optional["AetherAzureFilesReference"] = None,
cosmos_reference: Optional["AetherCosmosReference"] = None,
philly_hdfs_reference: Optional["AetherPhillyHdfsReference"] = None,
azure_sql_database_reference: Optional["AetherAzureDatabaseReference"] = None,
azure_postgres_database_reference: Optional["AetherAzureDatabaseReference"] = None,
azure_data_lake_gen2_reference: Optional["AetherAzureDataLakeGen2Reference"] = None,
dbfs_reference: Optional["AetherDBFSReference"] = None,
azure_my_sql_database_reference: Optional["AetherAzureDatabaseReference"] = None,
custom_reference: Optional["AetherCustomReference"] = None,
hdfs_reference: Optional["AetherHdfsReference"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"Cosmos", "PhillyHdfs", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2",
"DBFS", "AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype type: str or ~flow.models.AetherDataReferenceType
:keyword azure_blob_reference:
:paramtype azure_blob_reference: ~flow.models.AetherAzureBlobReference
:keyword azure_data_lake_reference:
:paramtype azure_data_lake_reference: ~flow.models.AetherAzureDataLakeReference
:keyword azure_files_reference:
:paramtype azure_files_reference: ~flow.models.AetherAzureFilesReference
:keyword cosmos_reference:
:paramtype cosmos_reference: ~flow.models.AetherCosmosReference
:keyword philly_hdfs_reference:
:paramtype philly_hdfs_reference: ~flow.models.AetherPhillyHdfsReference
:keyword azure_sql_database_reference:
:paramtype azure_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword azure_postgres_database_reference:
:paramtype azure_postgres_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword azure_data_lake_gen2_reference:
:paramtype azure_data_lake_gen2_reference: ~flow.models.AetherAzureDataLakeGen2Reference
:keyword dbfs_reference:
:paramtype dbfs_reference: ~flow.models.AetherDBFSReference
:keyword azure_my_sql_database_reference:
:paramtype azure_my_sql_database_reference: ~flow.models.AetherAzureDatabaseReference
:keyword custom_reference:
:paramtype custom_reference: ~flow.models.AetherCustomReference
:keyword hdfs_reference:
:paramtype hdfs_reference: ~flow.models.AetherHdfsReference
"""
super(AetherDataReference, self).__init__(**kwargs)
self.type = type
self.azure_blob_reference = azure_blob_reference
self.azure_data_lake_reference = azure_data_lake_reference
self.azure_files_reference = azure_files_reference
self.cosmos_reference = cosmos_reference
self.philly_hdfs_reference = philly_hdfs_reference
self.azure_sql_database_reference = azure_sql_database_reference
self.azure_postgres_database_reference = azure_postgres_database_reference
self.azure_data_lake_gen2_reference = azure_data_lake_gen2_reference
self.dbfs_reference = dbfs_reference
self.azure_my_sql_database_reference = azure_my_sql_database_reference
self.custom_reference = custom_reference
self.hdfs_reference = hdfs_reference
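
# Editor's sketch: a discriminated reference where ``type`` selects which of the
# per-storage sub-references is populated; here a DBFS reference (the
# AetherDBFSReference model appears later in this module). Values are
# illustrative assumptions.
#
#     reference = AetherDataReference(
#         type="DBFS",                   # one of the documented values
#         dbfs_reference=AetherDBFSReference(
#             relative_path="mnt/data",  # assumed DBFS path
#             aml_data_store_name="dbfs_store",
#         ),
#     )
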
class AetherDataSetDefinition(msrest.serialization.Model):
"""AetherDataSetDefinition.
:ivar data_type_short_name:
:vartype data_type_short_name: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar value:
:vartype value: ~flow.models.AetherDataSetDefinitionValue
"""
_attribute_map = {
'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'value': {'key': 'value', 'type': 'AetherDataSetDefinitionValue'},
}
def __init__(
self,
*,
data_type_short_name: Optional[str] = None,
parameter_name: Optional[str] = None,
value: Optional["AetherDataSetDefinitionValue"] = None,
**kwargs
):
"""
:keyword data_type_short_name:
:paramtype data_type_short_name: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword value:
:paramtype value: ~flow.models.AetherDataSetDefinitionValue
"""
super(AetherDataSetDefinition, self).__init__(**kwargs)
self.data_type_short_name = data_type_short_name
self.parameter_name = parameter_name
self.value = value
class AetherDataSetDefinitionValue(msrest.serialization.Model):
"""AetherDataSetDefinitionValue.
:ivar literal_value:
:vartype literal_value: ~flow.models.AetherDataPath
:ivar data_set_reference:
:vartype data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AetherAssetDefinition
"""
_attribute_map = {
'literal_value': {'key': 'literalValue', 'type': 'AetherDataPath'},
'data_set_reference': {'key': 'dataSetReference', 'type': 'AetherRegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'AetherSavedDataSetReference'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AetherAssetDefinition'},
}
def __init__(
self,
*,
literal_value: Optional["AetherDataPath"] = None,
data_set_reference: Optional["AetherRegisteredDataSetReference"] = None,
saved_data_set_reference: Optional["AetherSavedDataSetReference"] = None,
asset_definition: Optional["AetherAssetDefinition"] = None,
**kwargs
):
"""
:keyword literal_value:
:paramtype literal_value: ~flow.models.AetherDataPath
:keyword data_set_reference:
:paramtype data_set_reference: ~flow.models.AetherRegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.AetherSavedDataSetReference
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AetherAssetDefinition
"""
super(AetherDataSetDefinitionValue, self).__init__(**kwargs)
self.literal_value = literal_value
self.data_set_reference = data_set_reference
self.saved_data_set_reference = saved_data_set_reference
self.asset_definition = asset_definition
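
# Editor's sketch: a dataset definition whose value is a literal datastore path,
# composing the two models above with AetherDataPath. Names and paths are
# illustrative assumptions.
#
#     definition = AetherDataSetDefinition(
#         parameter_name="training_data",  # assumed parameter name
#         value=AetherDataSetDefinitionValue(
#             literal_value=AetherDataPath(
#                 data_store_name="workspaceblobstore",
#                 relative_path="data/train.csv",
#             ),
#         ),
#     )
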
class AetherDatasetOutput(msrest.serialization.Model):
"""AetherDatasetOutput.
:ivar dataset_type: Possible values include: "File", "Tabular".
:vartype dataset_type: str or ~flow.models.AetherDatasetType
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.AetherDatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
"""
_attribute_map = {
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
}
def __init__(
self,
*,
dataset_type: Optional[Union[str, "AetherDatasetType"]] = None,
dataset_registration: Optional["AetherDatasetRegistration"] = None,
dataset_output_options: Optional["AetherDatasetOutputOptions"] = None,
**kwargs
):
"""
:keyword dataset_type: Possible values include: "File", "Tabular".
:paramtype dataset_type: str or ~flow.models.AetherDatasetType
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
"""
super(AetherDatasetOutput, self).__init__(**kwargs)
self.dataset_type = dataset_type
self.dataset_registration = dataset_registration
self.dataset_output_options = dataset_output_options
class AetherDatasetOutputOptions(msrest.serialization.Model):
"""AetherDatasetOutputOptions.
:ivar source_globs:
:vartype source_globs: ~flow.models.AetherGlobsOptions
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_datastore_parameter_assignment:
:vartype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'source_globs': {'key': 'sourceGlobs', 'type': 'AetherGlobsOptions'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
*,
source_globs: Optional["AetherGlobsOptions"] = None,
path_on_datastore: Optional[str] = None,
path_on_datastore_parameter_assignment: Optional["AetherParameterAssignment"] = None,
**kwargs
):
"""
:keyword source_globs:
:paramtype source_globs: ~flow.models.AetherGlobsOptions
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_datastore_parameter_assignment:
:paramtype path_on_datastore_parameter_assignment: ~flow.models.AetherParameterAssignment
"""
super(AetherDatasetOutputOptions, self).__init__(**kwargs)
self.source_globs = source_globs
self.path_on_datastore = path_on_datastore
self.path_on_datastore_parameter_assignment = path_on_datastore_parameter_assignment
class AetherDatasetRegistration(msrest.serialization.Model):
"""AetherDatasetRegistration.
:ivar name:
:vartype name: str
:ivar create_new_version:
:vartype create_new_version: bool
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
create_new_version: Optional[bool] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword create_new_version:
:paramtype create_new_version: bool
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherDatasetRegistration, self).__init__(**kwargs)
self.name = name
self.create_new_version = create_new_version
self.description = description
self.tags = tags
self.additional_transformations = additional_transformations
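
# Editor's sketch: registering a new dataset version with tags. All values are
# illustrative assumptions.
#
#     registration = AetherDatasetRegistration(
#         name="churn-training",  # assumed dataset name
#         create_new_version=True,
#         description="Curated training split",
#         tags={"stage": "dev"},
#     )
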
class AetherDataSettings(msrest.serialization.Model):
"""AetherDataSettings.
:ivar target_column_name:
:vartype target_column_name: str
:ivar weight_column_name:
:vartype weight_column_name: str
:ivar positive_label:
:vartype positive_label: str
:ivar validation_data:
:vartype validation_data: ~flow.models.AetherValidationDataSettings
:ivar test_data:
:vartype test_data: ~flow.models.AetherTestDataSettings
"""
_attribute_map = {
'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
'positive_label': {'key': 'positiveLabel', 'type': 'str'},
'validation_data': {'key': 'validationData', 'type': 'AetherValidationDataSettings'},
'test_data': {'key': 'testData', 'type': 'AetherTestDataSettings'},
}
def __init__(
self,
*,
target_column_name: Optional[str] = None,
weight_column_name: Optional[str] = None,
positive_label: Optional[str] = None,
validation_data: Optional["AetherValidationDataSettings"] = None,
test_data: Optional["AetherTestDataSettings"] = None,
**kwargs
):
"""
:keyword target_column_name:
:paramtype target_column_name: str
:keyword weight_column_name:
:paramtype weight_column_name: str
:keyword positive_label:
:paramtype positive_label: str
:keyword validation_data:
:paramtype validation_data: ~flow.models.AetherValidationDataSettings
:keyword test_data:
:paramtype test_data: ~flow.models.AetherTestDataSettings
"""
super(AetherDataSettings, self).__init__(**kwargs)
self.target_column_name = target_column_name
self.weight_column_name = weight_column_name
self.positive_label = positive_label
self.validation_data = validation_data
self.test_data = test_data
class AetherDatastoreSetting(msrest.serialization.Model):
"""AetherDatastoreSetting.
:ivar data_store_name:
:vartype data_store_name: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
"""
super(AetherDatastoreSetting, self).__init__(**kwargs)
self.data_store_name = data_store_name
class AetherDataTransferCloudConfiguration(msrest.serialization.Model):
"""AetherDataTransferCloudConfiguration.
:ivar allow_overwrite:
:vartype allow_overwrite: bool
"""
_attribute_map = {
'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
}
def __init__(
self,
*,
allow_overwrite: Optional[bool] = None,
**kwargs
):
"""
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
"""
super(AetherDataTransferCloudConfiguration, self).__init__(**kwargs)
self.allow_overwrite = allow_overwrite
class AetherDataTransferSink(msrest.serialization.Model):
"""AetherDataTransferSink.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.AetherDataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.AetherFileSystem
:ivar database_sink:
:vartype database_sink: ~flow.models.AetherDatabaseSink
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
'database_sink': {'key': 'databaseSink', 'type': 'AetherDatabaseSink'},
}
def __init__(
self,
*,
type: Optional[Union[str, "AetherDataTransferStorageType"]] = None,
file_system: Optional["AetherFileSystem"] = None,
database_sink: Optional["AetherDatabaseSink"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.AetherDataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.AetherFileSystem
:keyword database_sink:
:paramtype database_sink: ~flow.models.AetherDatabaseSink
"""
super(AetherDataTransferSink, self).__init__(**kwargs)
self.type = type
self.file_system = file_system
self.database_sink = database_sink
class AetherDataTransferSource(msrest.serialization.Model):
"""AetherDataTransferSource.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.AetherDataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.AetherFileSystem
:ivar database_source:
:vartype database_source: ~flow.models.AetherDatabaseSource
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'AetherFileSystem'},
'database_source': {'key': 'databaseSource', 'type': 'AetherDatabaseSource'},
}
def __init__(
self,
*,
type: Optional[Union[str, "AetherDataTransferStorageType"]] = None,
file_system: Optional["AetherFileSystem"] = None,
database_source: Optional["AetherDatabaseSource"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.AetherDataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.AetherFileSystem
:keyword database_source:
:paramtype database_source: ~flow.models.AetherDatabaseSource
"""
super(AetherDataTransferSource, self).__init__(**kwargs)
self.type = type
self.file_system = file_system
self.database_source = database_source
class AetherDataTransferV2CloudSetting(msrest.serialization.Model):
"""AetherDataTransferV2CloudSetting.
:ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:vartype task_type: str or ~flow.models.AetherDataTransferTaskType
:ivar compute_name:
:vartype compute_name: str
:ivar copy_data_task:
:vartype copy_data_task: ~flow.models.AetherCopyDataTask
:ivar import_data_task:
:vartype import_data_task: ~flow.models.AetherImportDataTask
:ivar export_data_task:
:vartype export_data_task: ~flow.models.AetherExportDataTask
:ivar data_transfer_sources: This is a dictionary.
:vartype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
:ivar data_transfer_sinks: This is a dictionary.
:vartype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
_attribute_map = {
'task_type': {'key': 'taskType', 'type': 'str'},
'compute_name': {'key': 'ComputeName', 'type': 'str'},
'copy_data_task': {'key': 'CopyDataTask', 'type': 'AetherCopyDataTask'},
'import_data_task': {'key': 'ImportDataTask', 'type': 'AetherImportDataTask'},
'export_data_task': {'key': 'ExportDataTask', 'type': 'AetherExportDataTask'},
'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{AetherDataTransferSource}'},
'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{AetherDataTransferSink}'},
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
*,
task_type: Optional[Union[str, "AetherDataTransferTaskType"]] = None,
compute_name: Optional[str] = None,
copy_data_task: Optional["AetherCopyDataTask"] = None,
import_data_task: Optional["AetherImportDataTask"] = None,
export_data_task: Optional["AetherExportDataTask"] = None,
data_transfer_sources: Optional[Dict[str, "AetherDataTransferSource"]] = None,
data_transfer_sinks: Optional[Dict[str, "AetherDataTransferSink"]] = None,
data_copy_mode: Optional[Union[str, "AetherDataCopyMode"]] = None,
**kwargs
):
"""
:keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:paramtype task_type: str or ~flow.models.AetherDataTransferTaskType
:keyword compute_name:
:paramtype compute_name: str
:keyword copy_data_task:
:paramtype copy_data_task: ~flow.models.AetherCopyDataTask
:keyword import_data_task:
:paramtype import_data_task: ~flow.models.AetherImportDataTask
:keyword export_data_task:
:paramtype export_data_task: ~flow.models.AetherExportDataTask
:keyword data_transfer_sources: This is a dictionary.
:paramtype data_transfer_sources: dict[str, ~flow.models.AetherDataTransferSource]
:keyword data_transfer_sinks: This is a dictionary.
:paramtype data_transfer_sinks: dict[str, ~flow.models.AetherDataTransferSink]
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.AetherDataCopyMode
"""
super(AetherDataTransferV2CloudSetting, self).__init__(**kwargs)
self.task_type = task_type
self.compute_name = compute_name
self.copy_data_task = copy_data_task
self.import_data_task = import_data_task
self.export_data_task = export_data_task
self.data_transfer_sources = data_transfer_sources
self.data_transfer_sinks = data_transfer_sinks
self.data_copy_mode = data_copy_mode
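
# Editor's sketch: a copy-data task reading from a file-system source. The
# source key, connection, and path are illustrative assumptions; the enum
# strings come from the documented value lists.
#
#     transfer = AetherDataTransferV2CloudSetting(
#         task_type="CopyData",
#         copy_data_task=AetherCopyDataTask(data_copy_mode="FailIfConflict"),
#         data_transfer_sources={
#             "source": AetherDataTransferSource(
#                 type="FileSystem",
#                 file_system=AetherFileSystem(
#                     connection="my_connection",  # assumed connection name
#                     path="landing/input/",       # assumed path
#                 ),
#             ),
#         },
#     )
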
class AetherDBFSReference(msrest.serialization.Model):
"""AetherDBFSReference.
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
relative_path: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AetherDBFSReference, self).__init__(**kwargs)
self.relative_path = relative_path
self.aml_data_store_name = aml_data_store_name
class AetherDockerSettingConfiguration(msrest.serialization.Model):
"""AetherDockerSettingConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar shm_size:
:vartype shm_size: str
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
*,
use_docker: Optional[bool] = None,
shared_volumes: Optional[bool] = None,
shm_size: Optional[str] = None,
arguments: Optional[List[str]] = None,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword shm_size:
:paramtype shm_size: str
:keyword arguments:
:paramtype arguments: list[str]
"""
super(AetherDockerSettingConfiguration, self).__init__(**kwargs)
self.use_docker = use_docker
self.shared_volumes = shared_volumes
self.shm_size = shm_size
self.arguments = arguments
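
# Editor's sketch: enabling Docker with a larger shared-memory segment and extra
# docker-run arguments. Values are illustrative assumptions.
#
#     docker = AetherDockerSettingConfiguration(
#         use_docker=True,
#         shared_volumes=True,
#         shm_size="2g",             # assumed shared-memory size
#         arguments=["--ipc=host"],  # assumed extra arguments
#     )
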
class AetherDoWhileControlFlowInfo(msrest.serialization.Model):
"""AetherDoWhileControlFlowInfo.
    :ivar output_port_name_to_input_port_names_mapping: Dictionary mapping each output port
     name to a list of input port names.
    :vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:ivar condition_output_port_name:
:vartype condition_output_port_name: str
:ivar run_settings:
:vartype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
"""
_attribute_map = {
'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': 'AetherDoWhileControlFlowRunSettings'},
}
def __init__(
self,
*,
output_port_name_to_input_port_names_mapping: Optional[Dict[str, List[str]]] = None,
condition_output_port_name: Optional[str] = None,
run_settings: Optional["AetherDoWhileControlFlowRunSettings"] = None,
**kwargs
):
"""
    :keyword output_port_name_to_input_port_names_mapping: Dictionary mapping each output port
     name to a list of input port names.
    :paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:keyword condition_output_port_name:
:paramtype condition_output_port_name: str
:keyword run_settings:
:paramtype run_settings: ~flow.models.AetherDoWhileControlFlowRunSettings
"""
super(AetherDoWhileControlFlowInfo, self).__init__(**kwargs)
self.output_port_name_to_input_port_names_mapping = output_port_name_to_input_port_names_mapping
self.condition_output_port_name = condition_output_port_name
self.run_settings = run_settings
class AetherDoWhileControlFlowRunSettings(msrest.serialization.Model):
"""AetherDoWhileControlFlowRunSettings.
:ivar max_loop_iteration_count:
:vartype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
*,
max_loop_iteration_count: Optional["AetherParameterAssignment"] = None,
**kwargs
):
"""
:keyword max_loop_iteration_count:
:paramtype max_loop_iteration_count: ~flow.models.AetherParameterAssignment
"""
super(AetherDoWhileControlFlowRunSettings, self).__init__(**kwargs)
self.max_loop_iteration_count = max_loop_iteration_count
class AetherEntityInterfaceDocumentation(msrest.serialization.Model):
"""AetherEntityInterfaceDocumentation.
:ivar inputs_documentation: Dictionary of :code:`<string>`.
:vartype inputs_documentation: dict[str, str]
:ivar outputs_documentation: Dictionary of :code:`<string>`.
:vartype outputs_documentation: dict[str, str]
:ivar parameters_documentation: Dictionary of :code:`<string>`.
:vartype parameters_documentation: dict[str, str]
"""
_attribute_map = {
'inputs_documentation': {'key': 'inputsDocumentation', 'type': '{str}'},
'outputs_documentation': {'key': 'outputsDocumentation', 'type': '{str}'},
'parameters_documentation': {'key': 'parametersDocumentation', 'type': '{str}'},
}
def __init__(
self,
*,
inputs_documentation: Optional[Dict[str, str]] = None,
outputs_documentation: Optional[Dict[str, str]] = None,
parameters_documentation: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword inputs_documentation: Dictionary of :code:`<string>`.
:paramtype inputs_documentation: dict[str, str]
:keyword outputs_documentation: Dictionary of :code:`<string>`.
:paramtype outputs_documentation: dict[str, str]
:keyword parameters_documentation: Dictionary of :code:`<string>`.
:paramtype parameters_documentation: dict[str, str]
"""
super(AetherEntityInterfaceDocumentation, self).__init__(**kwargs)
self.inputs_documentation = inputs_documentation
self.outputs_documentation = outputs_documentation
self.parameters_documentation = parameters_documentation
class AetherEntrySetting(msrest.serialization.Model):
"""AetherEntrySetting.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
*,
file: Optional[str] = None,
class_name: Optional[str] = None,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(AetherEntrySetting, self).__init__(**kwargs)
self.file = file
self.class_name = class_name
class AetherEnvironmentConfiguration(msrest.serialization.Model):
"""AetherEnvironmentConfiguration.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar use_environment_definition:
:vartype use_environment_definition: bool
:ivar environment_definition_string:
:vartype environment_definition_string: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
version: Optional[str] = None,
use_environment_definition: Optional[bool] = None,
environment_definition_string: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword use_environment_definition:
:paramtype use_environment_definition: bool
:keyword environment_definition_string:
:paramtype environment_definition_string: str
"""
super(AetherEnvironmentConfiguration, self).__init__(**kwargs)
self.name = name
self.version = version
self.use_environment_definition = use_environment_definition
self.environment_definition_string = environment_definition_string
class AetherEsCloudConfiguration(msrest.serialization.Model):
"""AetherEsCloudConfiguration.
:ivar enable_output_to_file_based_on_data_type_id:
:vartype enable_output_to_file_based_on_data_type_id: bool
:ivar aml_compute_priority_internal:
:vartype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar itp_priority_internal:
:vartype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar singularity_priority_internal:
:vartype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
:ivar environment:
:vartype environment: ~flow.models.AetherEnvironmentConfiguration
:ivar hyper_drive_configuration:
:vartype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
:ivar k8_s_config:
:vartype k8_s_config: ~flow.models.AetherK8SConfiguration
:ivar resource_config:
:vartype resource_config: ~flow.models.AetherResourceConfiguration
:ivar torch_distributed_config:
:vartype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
:ivar target_selector_config:
:vartype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
:ivar docker_config:
:vartype docker_config: ~flow.models.AetherDockerSettingConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: int
:ivar identity:
:vartype identity: ~flow.models.AetherIdentitySetting
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar run_config:
:vartype run_config: str
"""
_attribute_map = {
'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
'aml_compute_priority_internal': {'key': 'amlComputePriorityInternal', 'type': 'AetherPriorityConfiguration'},
'itp_priority_internal': {'key': 'itpPriorityInternal', 'type': 'AetherPriorityConfiguration'},
'singularity_priority_internal': {'key': 'singularityPriorityInternal', 'type': 'AetherPriorityConfiguration'},
'environment': {'key': 'environment', 'type': 'AetherEnvironmentConfiguration'},
'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'AetherHyperDriveConfiguration'},
'k8_s_config': {'key': 'k8sConfig', 'type': 'AetherK8SConfiguration'},
'resource_config': {'key': 'resourceConfig', 'type': 'AetherResourceConfiguration'},
'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'AetherTorchDistributedConfiguration'},
'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'AetherTargetSelectorConfiguration'},
'docker_config': {'key': 'dockerConfig', 'type': 'AetherDockerSettingConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'AetherIdentitySetting'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'run_config': {'key': 'runConfig', 'type': 'str'},
}
def __init__(
self,
*,
enable_output_to_file_based_on_data_type_id: Optional[bool] = None,
aml_compute_priority_internal: Optional["AetherPriorityConfiguration"] = None,
itp_priority_internal: Optional["AetherPriorityConfiguration"] = None,
singularity_priority_internal: Optional["AetherPriorityConfiguration"] = None,
environment: Optional["AetherEnvironmentConfiguration"] = None,
hyper_drive_configuration: Optional["AetherHyperDriveConfiguration"] = None,
k8_s_config: Optional["AetherK8SConfiguration"] = None,
resource_config: Optional["AetherResourceConfiguration"] = None,
torch_distributed_config: Optional["AetherTorchDistributedConfiguration"] = None,
target_selector_config: Optional["AetherTargetSelectorConfiguration"] = None,
docker_config: Optional["AetherDockerSettingConfiguration"] = None,
environment_variables: Optional[Dict[str, str]] = None,
max_run_duration_seconds: Optional[int] = None,
identity: Optional["AetherIdentitySetting"] = None,
application_endpoints: Optional[Dict[str, "ApplicationEndpointConfiguration"]] = None,
run_config: Optional[str] = None,
**kwargs
):
"""
:keyword enable_output_to_file_based_on_data_type_id:
:paramtype enable_output_to_file_based_on_data_type_id: bool
:keyword aml_compute_priority_internal:
:paramtype aml_compute_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword itp_priority_internal:
:paramtype itp_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword singularity_priority_internal:
:paramtype singularity_priority_internal: ~flow.models.AetherPriorityConfiguration
:keyword environment:
:paramtype environment: ~flow.models.AetherEnvironmentConfiguration
:keyword hyper_drive_configuration:
:paramtype hyper_drive_configuration: ~flow.models.AetherHyperDriveConfiguration
:keyword k8_s_config:
:paramtype k8_s_config: ~flow.models.AetherK8SConfiguration
:keyword resource_config:
:paramtype resource_config: ~flow.models.AetherResourceConfiguration
:keyword torch_distributed_config:
:paramtype torch_distributed_config: ~flow.models.AetherTorchDistributedConfiguration
:keyword target_selector_config:
:paramtype target_selector_config: ~flow.models.AetherTargetSelectorConfiguration
:keyword docker_config:
:paramtype docker_config: ~flow.models.AetherDockerSettingConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: int
:keyword identity:
:paramtype identity: ~flow.models.AetherIdentitySetting
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword run_config:
:paramtype run_config: str
"""
super(AetherEsCloudConfiguration, self).__init__(**kwargs)
self.enable_output_to_file_based_on_data_type_id = enable_output_to_file_based_on_data_type_id
self.aml_compute_priority_internal = aml_compute_priority_internal
self.itp_priority_internal = itp_priority_internal
self.singularity_priority_internal = singularity_priority_internal
self.environment = environment
self.hyper_drive_configuration = hyper_drive_configuration
self.k8_s_config = k8_s_config
self.resource_config = resource_config
self.torch_distributed_config = torch_distributed_config
self.target_selector_config = target_selector_config
self.docker_config = docker_config
self.environment_variables = environment_variables
self.max_run_duration_seconds = max_run_duration_seconds
self.identity = identity
self.application_endpoints = application_endpoints
self.run_config = run_config
class AetherExportDataTask(msrest.serialization.Model):
"""AetherExportDataTask.
:ivar data_transfer_sink:
:vartype data_transfer_sink: ~flow.models.AetherDataTransferSink
"""
_attribute_map = {
'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'AetherDataTransferSink'},
}
def __init__(
self,
*,
data_transfer_sink: Optional["AetherDataTransferSink"] = None,
**kwargs
):
"""
:keyword data_transfer_sink:
:paramtype data_transfer_sink: ~flow.models.AetherDataTransferSink
"""
super(AetherExportDataTask, self).__init__(**kwargs)
self.data_transfer_sink = data_transfer_sink
class AetherFeaturizationSettings(msrest.serialization.Model):
"""AetherFeaturizationSettings.
:ivar mode: Possible values include: "Auto", "Custom", "Off".
:vartype mode: str or ~flow.models.AetherFeaturizationMode
:ivar blocked_transformers:
:vartype blocked_transformers: list[str]
:ivar column_purposes: Dictionary of :code:`<string>`.
:vartype column_purposes: dict[str, str]
:ivar drop_columns:
:vartype drop_columns: list[str]
    :ivar transformer_params: Dictionary of string to list of AetherColumnTransformer.
    :vartype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
:ivar dataset_language:
:vartype dataset_language: str
:ivar enable_dnn_featurization:
:vartype enable_dnn_featurization: bool
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
'transformer_params': {'key': 'transformerParams', 'type': '{[AetherColumnTransformer]}'},
'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "AetherFeaturizationMode"]] = None,
blocked_transformers: Optional[List[str]] = None,
column_purposes: Optional[Dict[str, str]] = None,
drop_columns: Optional[List[str]] = None,
transformer_params: Optional[Dict[str, List["AetherColumnTransformer"]]] = None,
dataset_language: Optional[str] = None,
enable_dnn_featurization: Optional[bool] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom", "Off".
:paramtype mode: str or ~flow.models.AetherFeaturizationMode
:keyword blocked_transformers:
:paramtype blocked_transformers: list[str]
:keyword column_purposes: Dictionary of :code:`<string>`.
:paramtype column_purposes: dict[str, str]
:keyword drop_columns:
:paramtype drop_columns: list[str]
:keyword transformer_params: Dictionary of :code:`<list of AetherColumnTransformer>`.
:paramtype transformer_params: dict[str, list[~flow.models.AetherColumnTransformer]]
:keyword dataset_language:
:paramtype dataset_language: str
:keyword enable_dnn_featurization:
:paramtype enable_dnn_featurization: bool
"""
super(AetherFeaturizationSettings, self).__init__(**kwargs)
self.mode = mode
self.blocked_transformers = blocked_transformers
self.column_purposes = column_purposes
self.drop_columns = drop_columns
self.transformer_params = transformer_params
self.dataset_language = dataset_language
self.enable_dnn_featurization = enable_dnn_featurization
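
# Illustrative usage sketch (not part of the generated model code): builds an
# AetherFeaturizationSettings with a custom featurization mode. The column and
# transformer names are hypothetical; "Custom" is one of the documented
# AetherFeaturizationMode values.
def _example_featurization_settings() -> "AetherFeaturizationSettings":
    return AetherFeaturizationSettings(
        mode="Custom",
        blocked_transformers=["LabelEncoder"],   # hypothetical transformer name
        column_purposes={"price": "Numeric"},    # hypothetical column purpose
        drop_columns=["row_id"],
        enable_dnn_featurization=False,
    )
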
class AetherFileSystem(msrest.serialization.Model):
"""AetherFileSystem.
:ivar connection:
:vartype connection: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
*,
connection: Optional[str] = None,
path: Optional[str] = None,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword path:
:paramtype path: str
"""
super(AetherFileSystem, self).__init__(**kwargs)
self.connection = connection
self.path = path
class AetherForecastHorizon(msrest.serialization.Model):
"""AetherForecastHorizon.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherForecastHorizonMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "AetherForecastHorizonMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherForecastHorizonMode
:keyword value:
:paramtype value: int
"""
super(AetherForecastHorizon, self).__init__(**kwargs)
self.mode = mode
self.value = value
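
# Illustrative usage sketch (not part of the generated model code): a custom
# forecast horizon of 14 periods, serialized to its wire format. serialize()
# is inherited from msrest.serialization.Model and applies the keys declared
# in _attribute_map above ("mode", "value").
def _example_forecast_horizon() -> dict:
    horizon = AetherForecastHorizon(mode="Custom", value=14)
    return horizon.serialize()
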
class AetherForecastingSettings(msrest.serialization.Model):
"""AetherForecastingSettings.
:ivar country_or_region_for_holidays:
:vartype country_or_region_for_holidays: str
:ivar time_column_name:
:vartype time_column_name: str
:ivar target_lags:
:vartype target_lags: ~flow.models.AetherTargetLags
:ivar target_rolling_window_size:
:vartype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
:ivar forecast_horizon:
:vartype forecast_horizon: ~flow.models.AetherForecastHorizon
:ivar time_series_id_column_names:
:vartype time_series_id_column_names: list[str]
:ivar frequency:
:vartype frequency: str
:ivar feature_lags:
:vartype feature_lags: str
:ivar seasonality:
:vartype seasonality: ~flow.models.AetherSeasonality
:ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:vartype short_series_handling_config: str or
~flow.models.AetherShortSeriesHandlingConfiguration
:ivar use_stl: Possible values include: "Season", "SeasonTrend".
:vartype use_stl: str or ~flow.models.AetherUseStl
:ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:vartype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
:ivar cv_step_size:
:vartype cv_step_size: int
:ivar features_unknown_at_forecast_time:
:vartype features_unknown_at_forecast_time: list[str]
"""
_attribute_map = {
'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
'target_lags': {'key': 'targetLags', 'type': 'AetherTargetLags'},
'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'AetherTargetRollingWindowSize'},
'forecast_horizon': {'key': 'forecastHorizon', 'type': 'AetherForecastHorizon'},
'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
'frequency': {'key': 'frequency', 'type': 'str'},
'feature_lags': {'key': 'featureLags', 'type': 'str'},
'seasonality': {'key': 'seasonality', 'type': 'AetherSeasonality'},
'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
'use_stl': {'key': 'useStl', 'type': 'str'},
'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
}
def __init__(
self,
*,
country_or_region_for_holidays: Optional[str] = None,
time_column_name: Optional[str] = None,
target_lags: Optional["AetherTargetLags"] = None,
target_rolling_window_size: Optional["AetherTargetRollingWindowSize"] = None,
forecast_horizon: Optional["AetherForecastHorizon"] = None,
time_series_id_column_names: Optional[List[str]] = None,
frequency: Optional[str] = None,
feature_lags: Optional[str] = None,
seasonality: Optional["AetherSeasonality"] = None,
short_series_handling_config: Optional[Union[str, "AetherShortSeriesHandlingConfiguration"]] = None,
use_stl: Optional[Union[str, "AetherUseStl"]] = None,
target_aggregate_function: Optional[Union[str, "AetherTargetAggregationFunction"]] = None,
cv_step_size: Optional[int] = None,
features_unknown_at_forecast_time: Optional[List[str]] = None,
**kwargs
):
"""
:keyword country_or_region_for_holidays:
:paramtype country_or_region_for_holidays: str
:keyword time_column_name:
:paramtype time_column_name: str
:keyword target_lags:
:paramtype target_lags: ~flow.models.AetherTargetLags
:keyword target_rolling_window_size:
:paramtype target_rolling_window_size: ~flow.models.AetherTargetRollingWindowSize
:keyword forecast_horizon:
:paramtype forecast_horizon: ~flow.models.AetherForecastHorizon
:keyword time_series_id_column_names:
:paramtype time_series_id_column_names: list[str]
:keyword frequency:
:paramtype frequency: str
:keyword feature_lags:
:paramtype feature_lags: str
:keyword seasonality:
:paramtype seasonality: ~flow.models.AetherSeasonality
:keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:paramtype short_series_handling_config: str or
~flow.models.AetherShortSeriesHandlingConfiguration
:keyword use_stl: Possible values include: "Season", "SeasonTrend".
:paramtype use_stl: str or ~flow.models.AetherUseStl
:keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:paramtype target_aggregate_function: str or ~flow.models.AetherTargetAggregationFunction
:keyword cv_step_size:
:paramtype cv_step_size: int
:keyword features_unknown_at_forecast_time:
:paramtype features_unknown_at_forecast_time: list[str]
"""
super(AetherForecastingSettings, self).__init__(**kwargs)
self.country_or_region_for_holidays = country_or_region_for_holidays
self.time_column_name = time_column_name
self.target_lags = target_lags
self.target_rolling_window_size = target_rolling_window_size
self.forecast_horizon = forecast_horizon
self.time_series_id_column_names = time_series_id_column_names
self.frequency = frequency
self.feature_lags = feature_lags
self.seasonality = seasonality
self.short_series_handling_config = short_series_handling_config
self.use_stl = use_stl
self.target_aggregate_function = target_aggregate_function
self.cv_step_size = cv_step_size
self.features_unknown_at_forecast_time = features_unknown_at_forecast_time
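
# Illustrative usage sketch (not part of the generated model code): composes a
# minimal AetherForecastingSettings for a daily series. The column name and
# frequency value are hypothetical; "Pad" is one of the documented
# AetherShortSeriesHandlingConfiguration values.
def _example_forecasting_settings() -> "AetherForecastingSettings":
    return AetherForecastingSettings(
        time_column_name="date",
        frequency="D",
        forecast_horizon=AetherForecastHorizon(mode="Custom", value=7),
        short_series_handling_config="Pad",
        cv_step_size=1,
    )
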
class AetherGeneralSettings(msrest.serialization.Model):
"""AetherGeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:vartype primary_metric: str or ~flow.models.AetherPrimaryMetrics
:ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:vartype task_type: str or ~flow.models.AetherTaskType
:ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:vartype log_verbosity: str or ~flow.models.AetherLogVerbosity
"""
_attribute_map = {
'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
'task_type': {'key': 'taskType', 'type': 'str'},
'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
}
def __init__(
self,
*,
primary_metric: Optional[Union[str, "AetherPrimaryMetrics"]] = None,
task_type: Optional[Union[str, "AetherTaskType"]] = None,
log_verbosity: Optional[Union[str, "AetherLogVerbosity"]] = None,
**kwargs
):
"""
:keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:paramtype primary_metric: str or ~flow.models.AetherPrimaryMetrics
:keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:paramtype task_type: str or ~flow.models.AetherTaskType
:keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:paramtype log_verbosity: str or ~flow.models.AetherLogVerbosity
"""
super(AetherGeneralSettings, self).__init__(**kwargs)
self.primary_metric = primary_metric
self.task_type = task_type
self.log_verbosity = log_verbosity
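
# Illustrative usage sketch (not part of the generated model code): general
# settings built from documented enum string values; the Union[str, Enum]
# parameters accept plain strings.
def _example_general_settings() -> "AetherGeneralSettings":
    return AetherGeneralSettings(
        primary_metric="Accuracy",      # an AetherPrimaryMetrics value
        task_type="Classification",     # an AetherTaskType value
        log_verbosity="Info",           # an AetherLogVerbosity value
    )
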
class AetherGlobsOptions(msrest.serialization.Model):
"""AetherGlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
*,
glob_patterns: Optional[List[str]] = None,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(AetherGlobsOptions, self).__init__(**kwargs)
self.glob_patterns = glob_patterns
class AetherGraphControlNode(msrest.serialization.Model):
"""AetherGraphControlNode.
:ivar id:
:vartype id: str
:ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
value is None.
:vartype control_type: str
:ivar control_parameter:
:vartype control_parameter: ~flow.models.AetherParameterAssignment
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'control_type': {'key': 'controlType', 'type': 'str'},
'control_parameter': {'key': 'controlParameter', 'type': 'AetherParameterAssignment'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
control_type: Optional[str] = None,
control_parameter: Optional["AetherParameterAssignment"] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword control_type: The only acceptable values to pass in are None and "IfElse". The
default value is None.
:paramtype control_type: str
:keyword control_parameter:
:paramtype control_parameter: ~flow.models.AetherParameterAssignment
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphControlNode, self).__init__(**kwargs)
self.id = id
self.control_type = control_type
self.control_parameter = control_parameter
self.run_attribution = run_attribution
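
# Illustrative usage sketch (not part of the generated model code): per the
# docstring above, control_type only accepts None or "IfElse". The node id is
# hypothetical and the control parameter is omitted for brevity.
def _example_graph_control_node() -> "AetherGraphControlNode":
    return AetherGraphControlNode(id="node-1", control_type="IfElse")
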
class AetherGraphControlReferenceNode(msrest.serialization.Model):
"""AetherGraphControlReferenceNode.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar comment:
:vartype comment: str
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.AetherControlFlowType
:ivar reference_node_id:
:vartype reference_node_id: str
:ivar do_while_control_flow_info:
:vartype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
:ivar parallel_for_control_flow_info:
:vartype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'AetherDoWhileControlFlowInfo'},
'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'AetherParallelForControlFlowInfo'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
comment: Optional[str] = None,
control_flow_type: Optional[Union[str, "AetherControlFlowType"]] = None,
reference_node_id: Optional[str] = None,
do_while_control_flow_info: Optional["AetherDoWhileControlFlowInfo"] = None,
parallel_for_control_flow_info: Optional["AetherParallelForControlFlowInfo"] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword comment:
:paramtype comment: str
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.AetherControlFlowType
:keyword reference_node_id:
:paramtype reference_node_id: str
:keyword do_while_control_flow_info:
:paramtype do_while_control_flow_info: ~flow.models.AetherDoWhileControlFlowInfo
:keyword parallel_for_control_flow_info:
:paramtype parallel_for_control_flow_info: ~flow.models.AetherParallelForControlFlowInfo
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphControlReferenceNode, self).__init__(**kwargs)
self.id = id
self.name = name
self.comment = comment
self.control_flow_type = control_flow_type
self.reference_node_id = reference_node_id
self.do_while_control_flow_info = do_while_control_flow_info
self.parallel_for_control_flow_info = parallel_for_control_flow_info
self.run_attribution = run_attribution
class AetherGraphDatasetNode(msrest.serialization.Model):
"""AetherGraphDatasetNode.
:ivar id:
:vartype id: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar data_path_parameter_name:
:vartype data_path_parameter_name: str
:ivar data_set_definition:
:vartype data_set_definition: ~flow.models.AetherDataSetDefinition
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
'data_set_definition': {'key': 'dataSetDefinition', 'type': 'AetherDataSetDefinition'},
}
def __init__(
self,
*,
id: Optional[str] = None,
dataset_id: Optional[str] = None,
data_path_parameter_name: Optional[str] = None,
data_set_definition: Optional["AetherDataSetDefinition"] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword data_path_parameter_name:
:paramtype data_path_parameter_name: str
:keyword data_set_definition:
:paramtype data_set_definition: ~flow.models.AetherDataSetDefinition
"""
super(AetherGraphDatasetNode, self).__init__(**kwargs)
self.id = id
self.dataset_id = dataset_id
self.data_path_parameter_name = data_path_parameter_name
self.data_set_definition = data_set_definition
class AetherGraphEdge(msrest.serialization.Model):
"""AetherGraphEdge.
:ivar source_output_port:
:vartype source_output_port: ~flow.models.AetherPortInfo
:ivar destination_input_port:
:vartype destination_input_port: ~flow.models.AetherPortInfo
"""
_attribute_map = {
'source_output_port': {'key': 'sourceOutputPort', 'type': 'AetherPortInfo'},
'destination_input_port': {'key': 'destinationInputPort', 'type': 'AetherPortInfo'},
}
def __init__(
self,
*,
source_output_port: Optional["AetherPortInfo"] = None,
destination_input_port: Optional["AetherPortInfo"] = None,
**kwargs
):
"""
:keyword source_output_port:
:paramtype source_output_port: ~flow.models.AetherPortInfo
:keyword destination_input_port:
:paramtype destination_input_port: ~flow.models.AetherPortInfo
"""
super(AetherGraphEdge, self).__init__(**kwargs)
self.source_output_port = source_output_port
self.destination_input_port = destination_input_port
class AetherGraphEntity(msrest.serialization.Model):
"""AetherGraphEntity.
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.AetherGraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.AetherGraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.AetherGraphEdge]
:ivar default_compute:
:vartype default_compute: ~flow.models.AetherComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.AetherDatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar etag:
:vartype etag: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.AetherEntityStatus
"""
_attribute_map = {
'module_nodes': {'key': 'moduleNodes', 'type': '[AetherGraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[AetherGraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[AetherGraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[AetherGraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[AetherGraphControlNode]'},
'edges': {'key': 'edges', 'type': '[AetherGraphEdge]'},
'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
}
def __init__(
self,
*,
module_nodes: Optional[List["AetherGraphModuleNode"]] = None,
dataset_nodes: Optional[List["AetherGraphDatasetNode"]] = None,
sub_graph_nodes: Optional[List["AetherGraphReferenceNode"]] = None,
control_reference_nodes: Optional[List["AetherGraphControlReferenceNode"]] = None,
control_nodes: Optional[List["AetherGraphControlNode"]] = None,
edges: Optional[List["AetherGraphEdge"]] = None,
default_compute: Optional["AetherComputeSetting"] = None,
default_datastore: Optional["AetherDatastoreSetting"] = None,
default_cloud_priority: Optional["AetherCloudPrioritySetting"] = None,
parent_sub_graph_module_ids: Optional[List[str]] = None,
id: Optional[str] = None,
workspace_id: Optional[str] = None,
etag: Optional[str] = None,
tags: Optional[List[str]] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
entity_status: Optional[Union[str, "AetherEntityStatus"]] = None,
**kwargs
):
"""
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.AetherGraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.AetherGraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.AetherGraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.AetherGraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.AetherGraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.AetherGraphEdge]
:keyword default_compute:
:paramtype default_compute: ~flow.models.AetherComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.AetherDatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword etag:
:paramtype etag: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.AetherEntityStatus
"""
super(AetherGraphEntity, self).__init__(**kwargs)
self.module_nodes = module_nodes
self.dataset_nodes = dataset_nodes
self.sub_graph_nodes = sub_graph_nodes
self.control_reference_nodes = control_reference_nodes
self.control_nodes = control_nodes
self.edges = edges
self.default_compute = default_compute
self.default_datastore = default_datastore
self.default_cloud_priority = default_cloud_priority
self.parent_sub_graph_module_ids = parent_sub_graph_module_ids
self.id = id
self.workspace_id = workspace_id
self.etag = etag
self.tags = tags
self.created_date = created_date
self.last_modified_date = last_modified_date
self.entity_status = entity_status
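
# Illustrative usage sketch (not part of the generated model code): a small
# graph entity serialized to its wire format. serialize() applies the
# camelCase keys from _attribute_map (e.g. workspace_id -> "workspaceId") and
# renders datetime fields as ISO-8601 strings. The identifiers are hypothetical.
def _example_graph_entity_wire_format() -> dict:
    entity = AetherGraphEntity(
        id="graph-1",
        workspace_id="ws-1",
        tags=["example"],
        entity_status="Active",   # an AetherEntityStatus value
    )
    return entity.serialize()
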
class AetherGraphModuleNode(msrest.serialization.Model):
"""AetherGraphModuleNode.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar default_data_retention_hint:
:vartype default_data_retention_hint: int
:ivar compliance_cluster:
:vartype compliance_cluster: str
:ivar euclid_workspace_id:
:vartype euclid_workspace_id: str
:ivar attached_modules:
:vartype attached_modules: list[str]
:ivar acceptable_machine_clusters:
:vartype acceptable_machine_clusters: list[str]
:ivar custom_data_location_id:
:vartype custom_data_location_id: str
:ivar alert_timeout_duration:
:vartype alert_timeout_duration: str
:ivar runconfig:
:vartype runconfig: str
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.AetherOutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.AetherInputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.AetherControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.AetherExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'default_data_retention_hint': {'key': 'defaultDataRetentionHint', 'type': 'int'},
'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
'euclid_workspace_id': {'key': 'euclidWorkspaceId', 'type': 'str'},
'attached_modules': {'key': 'attachedModules', 'type': '[str]'},
'acceptable_machine_clusters': {'key': 'acceptableMachineClusters', 'type': '[str]'},
'custom_data_location_id': {'key': 'customDataLocationId', 'type': 'str'},
'alert_timeout_duration': {'key': 'alertTimeoutDuration', 'type': 'str'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
cloud_priority: Optional[int] = None,
default_data_retention_hint: Optional[int] = None,
compliance_cluster: Optional[str] = None,
euclid_workspace_id: Optional[str] = None,
attached_modules: Optional[List[str]] = None,
acceptable_machine_clusters: Optional[List[str]] = None,
custom_data_location_id: Optional[str] = None,
alert_timeout_duration: Optional[str] = None,
runconfig: Optional[str] = None,
id: Optional[str] = None,
module_id: Optional[str] = None,
comment: Optional[str] = None,
name: Optional[str] = None,
module_parameters: Optional[List["AetherParameterAssignment"]] = None,
module_metadata_parameters: Optional[List["AetherParameterAssignment"]] = None,
module_output_settings: Optional[List["AetherOutputSetting"]] = None,
module_input_settings: Optional[List["AetherInputSetting"]] = None,
use_graph_default_compute: Optional[bool] = None,
use_graph_default_datastore: Optional[bool] = None,
regenerate_output: Optional[bool] = None,
control_inputs: Optional[List["AetherControlInput"]] = None,
cloud_settings: Optional["AetherCloudSettings"] = None,
execution_phase: Optional[Union[str, "AetherExecutionPhase"]] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword default_data_retention_hint:
:paramtype default_data_retention_hint: int
:keyword compliance_cluster:
:paramtype compliance_cluster: str
:keyword euclid_workspace_id:
:paramtype euclid_workspace_id: str
:keyword attached_modules:
:paramtype attached_modules: list[str]
:keyword acceptable_machine_clusters:
:paramtype acceptable_machine_clusters: list[str]
:keyword custom_data_location_id:
:paramtype custom_data_location_id: str
:keyword alert_timeout_duration:
:paramtype alert_timeout_duration: str
:keyword runconfig:
:paramtype runconfig: str
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.AetherInputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.AetherControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphModuleNode, self).__init__(**kwargs)
self.cloud_priority = cloud_priority
self.default_data_retention_hint = default_data_retention_hint
self.compliance_cluster = compliance_cluster
self.euclid_workspace_id = euclid_workspace_id
self.attached_modules = attached_modules
self.acceptable_machine_clusters = acceptable_machine_clusters
self.custom_data_location_id = custom_data_location_id
self.alert_timeout_duration = alert_timeout_duration
self.runconfig = runconfig
self.id = id
self.module_id = module_id
self.comment = comment
self.name = name
self.module_parameters = module_parameters
self.module_metadata_parameters = module_metadata_parameters
self.module_output_settings = module_output_settings
self.module_input_settings = module_input_settings
self.use_graph_default_compute = use_graph_default_compute
self.use_graph_default_datastore = use_graph_default_datastore
self.regenerate_output = regenerate_output
self.control_inputs = control_inputs
self.cloud_settings = cloud_settings
self.execution_phase = execution_phase
self.run_attribution = run_attribution
class AetherGraphReferenceNode(msrest.serialization.Model):
"""AetherGraphReferenceNode.
:ivar graph_id:
:vartype graph_id: str
:ivar default_compute:
:vartype default_compute: ~flow.models.AetherComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.AetherDatastoreSetting
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.AetherOutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.AetherInputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.AetherControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.AetherExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'default_compute': {'key': 'defaultCompute', 'type': 'AetherComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'AetherDatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[AetherParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[AetherParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[AetherOutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[AetherInputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[AetherControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
graph_id: Optional[str] = None,
default_compute: Optional["AetherComputeSetting"] = None,
default_datastore: Optional["AetherDatastoreSetting"] = None,
id: Optional[str] = None,
module_id: Optional[str] = None,
comment: Optional[str] = None,
name: Optional[str] = None,
module_parameters: Optional[List["AetherParameterAssignment"]] = None,
module_metadata_parameters: Optional[List["AetherParameterAssignment"]] = None,
module_output_settings: Optional[List["AetherOutputSetting"]] = None,
module_input_settings: Optional[List["AetherInputSetting"]] = None,
use_graph_default_compute: Optional[bool] = None,
use_graph_default_datastore: Optional[bool] = None,
regenerate_output: Optional[bool] = None,
control_inputs: Optional[List["AetherControlInput"]] = None,
cloud_settings: Optional["AetherCloudSettings"] = None,
execution_phase: Optional[Union[str, "AetherExecutionPhase"]] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword default_compute:
:paramtype default_compute: ~flow.models.AetherComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.AetherDatastoreSetting
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.AetherParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.AetherOutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.AetherInputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.AetherControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.AetherExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(AetherGraphReferenceNode, self).__init__(**kwargs)
self.graph_id = graph_id
self.default_compute = default_compute
self.default_datastore = default_datastore
self.id = id
self.module_id = module_id
self.comment = comment
self.name = name
self.module_parameters = module_parameters
self.module_metadata_parameters = module_metadata_parameters
self.module_output_settings = module_output_settings
self.module_input_settings = module_input_settings
self.use_graph_default_compute = use_graph_default_compute
self.use_graph_default_datastore = use_graph_default_datastore
self.regenerate_output = regenerate_output
self.control_inputs = control_inputs
self.cloud_settings = cloud_settings
self.execution_phase = execution_phase
self.run_attribution = run_attribution
class AetherHdfsReference(msrest.serialization.Model):
"""AetherHdfsReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
aml_data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherHdfsReference, self).__init__(**kwargs)
self.aml_data_store_name = aml_data_store_name
self.relative_path = relative_path
class AetherHdiClusterComputeInfo(msrest.serialization.Model):
"""AetherHdiClusterComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
*,
address: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
private_key: Optional[str] = None,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(AetherHdiClusterComputeInfo, self).__init__(**kwargs)
self.address = address
self.username = username
self.password = password
self.private_key = private_key
class AetherHdiRunConfiguration(msrest.serialization.Model):
"""AetherHdiRunConfiguration.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar compute_name:
:vartype compute_name: str
:ivar queue:
:vartype queue: str
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar name:
:vartype name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'str'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'conf': {'key': 'conf', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
file: Optional[str] = None,
class_name: Optional[str] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
jars: Optional[List[str]] = None,
py_files: Optional[List[str]] = None,
compute_name: Optional[str] = None,
queue: Optional[str] = None,
driver_memory: Optional[str] = None,
driver_cores: Optional[int] = None,
executor_memory: Optional[str] = None,
executor_cores: Optional[int] = None,
number_executors: Optional[int] = None,
conf: Optional[Dict[str, str]] = None,
name: Optional[str] = None,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword compute_name:
:paramtype compute_name: str
:keyword queue:
:paramtype queue: str
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword name:
:paramtype name: str
"""
super(AetherHdiRunConfiguration, self).__init__(**kwargs)
self.file = file
self.class_name = class_name
self.files = files
self.archives = archives
self.jars = jars
self.py_files = py_files
self.compute_name = compute_name
self.queue = queue
self.driver_memory = driver_memory
self.driver_cores = driver_cores
self.executor_memory = executor_memory
self.executor_cores = executor_cores
self.number_executors = number_executors
self.conf = conf
self.name = name
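
# Illustrative usage sketch (not part of the generated model code): a
# Spark-on-HDInsight run configuration. The script path, compute name, and
# sizing values are hypothetical; conf carries raw Spark properties as a
# plain str -> str dictionary.
def _example_hdi_run_configuration() -> "AetherHdiRunConfiguration":
    return AetherHdiRunConfiguration(
        file="wasb:///scripts/job.py",
        compute_name="my-hdi-cluster",
        driver_memory="4g",
        driver_cores=2,
        executor_memory="8g",
        executor_cores=4,
        number_executors=10,
        conf={"spark.yarn.maxAppAttempts": "1"},
    )
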
class AetherHyperDriveConfiguration(msrest.serialization.Model):
"""AetherHyperDriveConfiguration.
:ivar hyper_drive_run_config:
:vartype hyper_drive_run_config: str
:ivar primary_metric_goal:
:vartype primary_metric_goal: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar arguments:
:vartype arguments: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
*,
hyper_drive_run_config: Optional[str] = None,
primary_metric_goal: Optional[str] = None,
primary_metric_name: Optional[str] = None,
arguments: Optional[List["AetherArgumentAssignment"]] = None,
**kwargs
):
"""
:keyword hyper_drive_run_config:
:paramtype hyper_drive_run_config: str
:keyword primary_metric_goal:
:paramtype primary_metric_goal: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword arguments:
:paramtype arguments: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherHyperDriveConfiguration, self).__init__(**kwargs)
self.hyper_drive_run_config = hyper_drive_run_config
self.primary_metric_goal = primary_metric_goal
self.primary_metric_name = primary_metric_name
self.arguments = arguments
class AetherIdentitySetting(msrest.serialization.Model):
"""AetherIdentitySetting.
:ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:vartype type: str or ~flow.models.AetherIdentityType
:ivar client_id:
:vartype client_id: str
:ivar object_id:
:vartype object_id: str
:ivar msi_resource_id:
:vartype msi_resource_id: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'object_id': {'key': 'objectId', 'type': 'str'},
'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "AetherIdentityType"]] = None,
client_id: Optional[str] = None,
object_id: Optional[str] = None,
msi_resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:paramtype type: str or ~flow.models.AetherIdentityType
:keyword client_id:
:paramtype client_id: str
:keyword object_id:
:paramtype object_id: str
:keyword msi_resource_id:
:paramtype msi_resource_id: str
"""
super(AetherIdentitySetting, self).__init__(**kwargs)
self.type = type
self.client_id = client_id
self.object_id = object_id
self.msi_resource_id = msi_resource_id
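
# Illustrative usage sketch (not part of the generated model code): a managed
# identity setting. "Managed" is one of the documented AetherIdentityType
# values; the client id below is a placeholder GUID.
def _example_identity_setting() -> "AetherIdentitySetting":
    return AetherIdentitySetting(
        type="Managed",
        client_id="00000000-0000-0000-0000-000000000000",
    )
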
class AetherImportDataTask(msrest.serialization.Model):
"""AetherImportDataTask.
:ivar data_transfer_source:
:vartype data_transfer_source: ~flow.models.AetherDataTransferSource
"""
_attribute_map = {
'data_transfer_source': {'key': 'DataTransferSource', 'type': 'AetherDataTransferSource'},
}
def __init__(
self,
*,
data_transfer_source: Optional["AetherDataTransferSource"] = None,
**kwargs
):
"""
:keyword data_transfer_source:
:paramtype data_transfer_source: ~flow.models.AetherDataTransferSource
"""
super(AetherImportDataTask, self).__init__(**kwargs)
self.data_transfer_source = data_transfer_source
class AetherInputSetting(msrest.serialization.Model):
"""AetherInputSetting.
:ivar name:
:vartype name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
path_on_compute: Optional[str] = None,
options: Optional[Dict[str, str]] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherInputSetting, self).__init__(**kwargs)
self.name = name
self.data_store_mode = data_store_mode
self.path_on_compute = path_on_compute
self.options = options
self.additional_transformations = additional_transformations
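
# Illustrative usage sketch (not part of the generated model code): an input
# mounted onto the compute target. "Mount" is one of the documented
# AetherDataStoreMode values; the name and path are hypothetical.
def _example_input_setting() -> "AetherInputSetting":
    return AetherInputSetting(
        name="training_data",
        data_store_mode="Mount",
        path_on_compute="/mnt/inputs/training_data",
    )
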
class AetherInteractiveConfig(msrest.serialization.Model):
"""AetherInteractiveConfig.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
*,
is_ssh_enabled: Optional[bool] = None,
ssh_public_key: Optional[str] = None,
is_i_python_enabled: Optional[bool] = None,
is_tensor_board_enabled: Optional[bool] = None,
interactive_port: Optional[int] = None,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(AetherInteractiveConfig, self).__init__(**kwargs)
self.is_ssh_enabled = is_ssh_enabled
self.ssh_public_key = ssh_public_key
self.is_i_python_enabled = is_i_python_enabled
self.is_tensor_board_enabled = is_tensor_board_enabled
self.interactive_port = interactive_port
class AetherK8SConfiguration(msrest.serialization.Model):
"""AetherK8SConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.AetherResourceConfig
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.AetherPriorityConfig
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.AetherInteractiveConfig
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'AetherResourceConfig'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AetherPriorityConfig'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'AetherInteractiveConfig'},
}
def __init__(
self,
*,
max_retry_count: Optional[int] = None,
resource_configuration: Optional["AetherResourceConfig"] = None,
priority_configuration: Optional["AetherPriorityConfig"] = None,
interactive_configuration: Optional["AetherInteractiveConfig"] = None,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.AetherResourceConfig
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.AetherPriorityConfig
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.AetherInteractiveConfig
"""
super(AetherK8SConfiguration, self).__init__(**kwargs)
self.max_retry_count = max_retry_count
self.resource_configuration = resource_configuration
self.priority_configuration = priority_configuration
self.interactive_configuration = interactive_configuration
class AetherLegacyDataPath(msrest.serialization.Model):
"""AetherLegacyDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherLegacyDataPath, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.data_store_mode = data_store_mode
self.relative_path = relative_path
class AetherLimitSettings(msrest.serialization.Model):
"""AetherLimitSettings.
:ivar max_trials:
:vartype max_trials: int
:ivar timeout:
:vartype timeout: str
:ivar trial_timeout:
:vartype trial_timeout: str
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
:ivar max_cores_per_trial:
:vartype max_cores_per_trial: int
:ivar exit_score:
:vartype exit_score: float
:ivar enable_early_termination:
:vartype enable_early_termination: bool
:ivar max_nodes:
:vartype max_nodes: int
"""
_attribute_map = {
'max_trials': {'key': 'maxTrials', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'str'},
'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
'exit_score': {'key': 'exitScore', 'type': 'float'},
'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
'max_nodes': {'key': 'maxNodes', 'type': 'int'},
}
def __init__(
self,
*,
max_trials: Optional[int] = None,
timeout: Optional[str] = None,
trial_timeout: Optional[str] = None,
max_concurrent_trials: Optional[int] = None,
max_cores_per_trial: Optional[int] = None,
exit_score: Optional[float] = None,
enable_early_termination: Optional[bool] = None,
max_nodes: Optional[int] = None,
**kwargs
):
"""
:keyword max_trials:
:paramtype max_trials: int
:keyword timeout:
:paramtype timeout: str
:keyword trial_timeout:
:paramtype trial_timeout: str
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
:keyword max_cores_per_trial:
:paramtype max_cores_per_trial: int
:keyword exit_score:
:paramtype exit_score: float
:keyword enable_early_termination:
:paramtype enable_early_termination: bool
:keyword max_nodes:
:paramtype max_nodes: int
"""
super(AetherLimitSettings, self).__init__(**kwargs)
self.max_trials = max_trials
self.timeout = timeout
self.trial_timeout = trial_timeout
self.max_concurrent_trials = max_concurrent_trials
self.max_cores_per_trial = max_cores_per_trial
self.exit_score = exit_score
self.enable_early_termination = enable_early_termination
self.max_nodes = max_nodes
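
# Illustrative usage sketch (not part of the generated model code): trial
# limits for a sweep. The timeout fields are plain strings on the wire; the
# "d.hh:mm:ss" duration format shown here is an assumption, not confirmed by
# this module.
def _example_limit_settings() -> "AetherLimitSettings":
    return AetherLimitSettings(
        max_trials=20,
        max_concurrent_trials=4,
        timeout="1.00:00:00",        # assumed duration format
        trial_timeout="0.01:00:00",  # assumed duration format
        enable_early_termination=True,
    )
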
class AetherMlcComputeInfo(msrest.serialization.Model):
"""AetherMlcComputeInfo.
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
"""
_attribute_map = {
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
}
def __init__(
self,
*,
mlc_compute_type: Optional[str] = None,
**kwargs
):
"""
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
"""
super(AetherMlcComputeInfo, self).__init__(**kwargs)
self.mlc_compute_type = mlc_compute_type
class AetherModuleEntity(msrest.serialization.Model):
"""AetherModuleEntity.
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.AetherCreatedBy
:ivar display_name:
:vartype display_name: str
:ivar module_execution_type:
:vartype module_execution_type: str
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.AetherModuleType
:ivar module_type_version:
:vartype module_type_version: str
:ivar resource_requirements:
:vartype resource_requirements: ~flow.models.AetherResourceModel
:ivar machine_cluster:
:vartype machine_cluster: list[str]
:ivar default_compliance_cluster:
:vartype default_compliance_cluster: str
:ivar repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
"Cosmos".
:vartype repository_type: str or ~flow.models.AetherRepositoryType
:ivar relative_path_to_source_code:
:vartype relative_path_to_source_code: str
:ivar commit_id:
:vartype commit_id: str
:ivar code_review_link:
:vartype code_review_link: str
:ivar unit_tests_available:
:vartype unit_tests_available: bool
:ivar is_compressed:
:vartype is_compressed: bool
:ivar execution_environment: Possible values include: "ExeWorkerMachine",
"DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
"HyperVWithNetwork".
:vartype execution_environment: str or ~flow.models.AetherExecutionEnvironment
:ivar is_output_markup_enabled:
:vartype is_output_markup_enabled: bool
:ivar docker_image_id:
:vartype docker_image_id: str
:ivar docker_image_reference:
:vartype docker_image_reference: str
:ivar docker_image_security_groups:
:vartype docker_image_security_groups: str
:ivar extended_properties:
:vartype extended_properties: ~flow.models.AetherModuleExtendedProperties
:ivar deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
:vartype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
:ivar deployment_source_metadata:
:vartype deployment_source_metadata: str
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
:ivar kv_tags: This is a dictionary.
:vartype kv_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar created_by:
:vartype created_by: ~flow.models.AetherCreatedBy
:ivar runconfig:
:vartype runconfig: str
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.AetherCloudSettings
:ivar category:
:vartype category: str
:ivar step_type:
:vartype step_type: str
:ivar stage:
:vartype stage: str
:ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:vartype upload_state: str or ~flow.models.AetherUploadState
:ivar source_code_location:
:vartype source_code_location: str
:ivar size_in_bytes:
:vartype size_in_bytes: long
:ivar download_location:
:vartype download_location: str
:ivar data_location:
:vartype data_location: ~flow.models.AetherDataLocation
:ivar scripting_runtime_id:
:vartype scripting_runtime_id: str
:ivar interface_documentation:
:vartype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
:ivar is_eyes_on:
:vartype is_eyes_on: bool
:ivar compliance_cluster:
:vartype compliance_cluster: str
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar information_url:
:vartype information_url: str
:ivar is_experiment_id_in_parameters:
:vartype is_experiment_id_in_parameters: bool
:ivar interface_string:
:vartype interface_string: str
:ivar default_parameters: This is a dictionary.
:vartype default_parameters: dict[str, str]
:ivar structured_interface:
:vartype structured_interface: ~flow.models.AetherStructuredInterface
:ivar family_id:
:vartype family_id: str
:ivar name:
:vartype name: str
:ivar hash:
:vartype hash: str
:ivar description:
:vartype description: str
:ivar version:
:vartype version: str
:ivar sequence_number_in_family:
:vartype sequence_number_in_family: int
:ivar owner:
:vartype owner: str
:ivar azure_tenant_id:
:vartype azure_tenant_id: str
:ivar azure_user_id:
:vartype azure_user_id: str
:ivar collaborators:
:vartype collaborators: list[str]
:ivar id:
:vartype id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar etag:
:vartype etag: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.AetherEntityStatus
"""
_attribute_map = {
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'AetherCreatedBy'},
'display_name': {'key': 'displayName', 'type': 'str'},
'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
'module_type': {'key': 'moduleType', 'type': 'str'},
'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
'resource_requirements': {'key': 'resourceRequirements', 'type': 'AetherResourceModel'},
'machine_cluster': {'key': 'machineCluster', 'type': '[str]'},
'default_compliance_cluster': {'key': 'defaultComplianceCluster', 'type': 'str'},
'repository_type': {'key': 'repositoryType', 'type': 'str'},
'relative_path_to_source_code': {'key': 'relativePathToSourceCode', 'type': 'str'},
'commit_id': {'key': 'commitId', 'type': 'str'},
'code_review_link': {'key': 'codeReviewLink', 'type': 'str'},
'unit_tests_available': {'key': 'unitTestsAvailable', 'type': 'bool'},
'is_compressed': {'key': 'isCompressed', 'type': 'bool'},
'execution_environment': {'key': 'executionEnvironment', 'type': 'str'},
'is_output_markup_enabled': {'key': 'isOutputMarkupEnabled', 'type': 'bool'},
'docker_image_id': {'key': 'dockerImageId', 'type': 'str'},
'docker_image_reference': {'key': 'dockerImageReference', 'type': 'str'},
'docker_image_security_groups': {'key': 'dockerImageSecurityGroups', 'type': 'str'},
'extended_properties': {'key': 'extendedProperties', 'type': 'AetherModuleExtendedProperties'},
'deployment_source': {'key': 'deploymentSource', 'type': 'str'},
'deployment_source_metadata': {'key': 'deploymentSourceMetadata', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
'kv_tags': {'key': 'kvTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'created_by': {'key': 'createdBy', 'type': 'AetherCreatedBy'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'AetherCloudSettings'},
'category': {'key': 'category', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'stage': {'key': 'stage', 'type': 'str'},
'upload_state': {'key': 'uploadState', 'type': 'str'},
'source_code_location': {'key': 'sourceCodeLocation', 'type': 'str'},
'size_in_bytes': {'key': 'sizeInBytes', 'type': 'long'},
'download_location': {'key': 'downloadLocation', 'type': 'str'},
'data_location': {'key': 'dataLocation', 'type': 'AetherDataLocation'},
'scripting_runtime_id': {'key': 'scriptingRuntimeId', 'type': 'str'},
'interface_documentation': {'key': 'interfaceDocumentation', 'type': 'AetherEntityInterfaceDocumentation'},
'is_eyes_on': {'key': 'isEyesOn', 'type': 'bool'},
'compliance_cluster': {'key': 'complianceCluster', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'information_url': {'key': 'informationUrl', 'type': 'str'},
'is_experiment_id_in_parameters': {'key': 'isExperimentIdInParameters', 'type': 'bool'},
'interface_string': {'key': 'interfaceString', 'type': 'str'},
'default_parameters': {'key': 'defaultParameters', 'type': '{str}'},
'structured_interface': {'key': 'structuredInterface', 'type': 'AetherStructuredInterface'},
'family_id': {'key': 'familyId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'hash': {'key': 'hash', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'sequence_number_in_family': {'key': 'sequenceNumberInFamily', 'type': 'int'},
'owner': {'key': 'owner', 'type': 'str'},
'azure_tenant_id': {'key': 'azureTenantId', 'type': 'str'},
'azure_user_id': {'key': 'azureUserId', 'type': 'str'},
'collaborators': {'key': 'collaborators', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
}
def __init__(
self,
*,
last_updated_by: Optional["AetherCreatedBy"] = None,
display_name: Optional[str] = None,
module_execution_type: Optional[str] = None,
module_type: Optional[Union[str, "AetherModuleType"]] = None,
module_type_version: Optional[str] = None,
resource_requirements: Optional["AetherResourceModel"] = None,
machine_cluster: Optional[List[str]] = None,
default_compliance_cluster: Optional[str] = None,
repository_type: Optional[Union[str, "AetherRepositoryType"]] = None,
relative_path_to_source_code: Optional[str] = None,
commit_id: Optional[str] = None,
code_review_link: Optional[str] = None,
unit_tests_available: Optional[bool] = None,
is_compressed: Optional[bool] = None,
execution_environment: Optional[Union[str, "AetherExecutionEnvironment"]] = None,
is_output_markup_enabled: Optional[bool] = None,
docker_image_id: Optional[str] = None,
docker_image_reference: Optional[str] = None,
docker_image_security_groups: Optional[str] = None,
extended_properties: Optional["AetherModuleExtendedProperties"] = None,
deployment_source: Optional[Union[str, "AetherModuleDeploymentSource"]] = None,
deployment_source_metadata: Optional[str] = None,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
kv_tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
created_by: Optional["AetherCreatedBy"] = None,
runconfig: Optional[str] = None,
cloud_settings: Optional["AetherCloudSettings"] = None,
category: Optional[str] = None,
step_type: Optional[str] = None,
stage: Optional[str] = None,
upload_state: Optional[Union[str, "AetherUploadState"]] = None,
source_code_location: Optional[str] = None,
size_in_bytes: Optional[int] = None,
download_location: Optional[str] = None,
data_location: Optional["AetherDataLocation"] = None,
scripting_runtime_id: Optional[str] = None,
interface_documentation: Optional["AetherEntityInterfaceDocumentation"] = None,
is_eyes_on: Optional[bool] = None,
compliance_cluster: Optional[str] = None,
is_deterministic: Optional[bool] = None,
information_url: Optional[str] = None,
is_experiment_id_in_parameters: Optional[bool] = None,
interface_string: Optional[str] = None,
default_parameters: Optional[Dict[str, str]] = None,
structured_interface: Optional["AetherStructuredInterface"] = None,
family_id: Optional[str] = None,
name: Optional[str] = None,
hash: Optional[str] = None,
description: Optional[str] = None,
version: Optional[str] = None,
sequence_number_in_family: Optional[int] = None,
owner: Optional[str] = None,
azure_tenant_id: Optional[str] = None,
azure_user_id: Optional[str] = None,
collaborators: Optional[List[str]] = None,
id: Optional[str] = None,
workspace_id: Optional[str] = None,
etag: Optional[str] = None,
tags: Optional[List[str]] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
entity_status: Optional[Union[str, "AetherEntityStatus"]] = None,
**kwargs
):
"""
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.AetherCreatedBy
:keyword display_name:
:paramtype display_name: str
:keyword module_execution_type:
:paramtype module_execution_type: str
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.AetherModuleType
:keyword module_type_version:
:paramtype module_type_version: str
:keyword resource_requirements:
:paramtype resource_requirements: ~flow.models.AetherResourceModel
:keyword machine_cluster:
:paramtype machine_cluster: list[str]
:keyword default_compliance_cluster:
:paramtype default_compliance_cluster: str
:keyword repository_type: Possible values include: "None", "Other", "Git", "SourceDepot",
"Cosmos".
:paramtype repository_type: str or ~flow.models.AetherRepositoryType
:keyword relative_path_to_source_code:
:paramtype relative_path_to_source_code: str
:keyword commit_id:
:paramtype commit_id: str
:keyword code_review_link:
:paramtype code_review_link: str
:keyword unit_tests_available:
:paramtype unit_tests_available: bool
:keyword is_compressed:
:paramtype is_compressed: bool
:keyword execution_environment: Possible values include: "ExeWorkerMachine",
"DockerContainerWithoutNetwork", "DockerContainerWithNetwork", "HyperVWithoutNetwork",
"HyperVWithNetwork".
:paramtype execution_environment: str or ~flow.models.AetherExecutionEnvironment
:keyword is_output_markup_enabled:
:paramtype is_output_markup_enabled: bool
:keyword docker_image_id:
:paramtype docker_image_id: str
:keyword docker_image_reference:
:paramtype docker_image_reference: str
:keyword docker_image_security_groups:
:paramtype docker_image_security_groups: str
:keyword extended_properties:
:paramtype extended_properties: ~flow.models.AetherModuleExtendedProperties
:keyword deployment_source: Possible values include: "Client", "AutoDeployment", "Vsts".
:paramtype deployment_source: str or ~flow.models.AetherModuleDeploymentSource
:keyword deployment_source_metadata:
:paramtype deployment_source_metadata: str
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
:keyword kv_tags: This is a dictionary.
:paramtype kv_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword created_by:
:paramtype created_by: ~flow.models.AetherCreatedBy
:keyword runconfig:
:paramtype runconfig: str
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.AetherCloudSettings
:keyword category:
:paramtype category: str
:keyword step_type:
:paramtype step_type: str
:keyword stage:
:paramtype stage: str
:keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:paramtype upload_state: str or ~flow.models.AetherUploadState
:keyword source_code_location:
:paramtype source_code_location: str
:keyword size_in_bytes:
:paramtype size_in_bytes: long
:keyword download_location:
:paramtype download_location: str
:keyword data_location:
:paramtype data_location: ~flow.models.AetherDataLocation
:keyword scripting_runtime_id:
:paramtype scripting_runtime_id: str
:keyword interface_documentation:
:paramtype interface_documentation: ~flow.models.AetherEntityInterfaceDocumentation
:keyword is_eyes_on:
:paramtype is_eyes_on: bool
:keyword compliance_cluster:
:paramtype compliance_cluster: str
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword information_url:
:paramtype information_url: str
:keyword is_experiment_id_in_parameters:
:paramtype is_experiment_id_in_parameters: bool
:keyword interface_string:
:paramtype interface_string: str
:keyword default_parameters: This is a dictionary.
:paramtype default_parameters: dict[str, str]
:keyword structured_interface:
:paramtype structured_interface: ~flow.models.AetherStructuredInterface
:keyword family_id:
:paramtype family_id: str
:keyword name:
:paramtype name: str
:keyword hash:
:paramtype hash: str
:keyword description:
:paramtype description: str
:keyword version:
:paramtype version: str
:keyword sequence_number_in_family:
:paramtype sequence_number_in_family: int
:keyword owner:
:paramtype owner: str
:keyword azure_tenant_id:
:paramtype azure_tenant_id: str
:keyword azure_user_id:
:paramtype azure_user_id: str
:keyword collaborators:
:paramtype collaborators: list[str]
:keyword id:
:paramtype id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword etag:
:paramtype etag: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.AetherEntityStatus
"""
super(AetherModuleEntity, self).__init__(**kwargs)
self.last_updated_by = last_updated_by
self.display_name = display_name
self.module_execution_type = module_execution_type
self.module_type = module_type
self.module_type_version = module_type_version
self.resource_requirements = resource_requirements
self.machine_cluster = machine_cluster
self.default_compliance_cluster = default_compliance_cluster
self.repository_type = repository_type
self.relative_path_to_source_code = relative_path_to_source_code
self.commit_id = commit_id
self.code_review_link = code_review_link
self.unit_tests_available = unit_tests_available
self.is_compressed = is_compressed
self.execution_environment = execution_environment
self.is_output_markup_enabled = is_output_markup_enabled
self.docker_image_id = docker_image_id
self.docker_image_reference = docker_image_reference
self.docker_image_security_groups = docker_image_security_groups
self.extended_properties = extended_properties
self.deployment_source = deployment_source
self.deployment_source_metadata = deployment_source_metadata
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
self.kv_tags = kv_tags
self.properties = properties
self.created_by = created_by
self.runconfig = runconfig
self.cloud_settings = cloud_settings
self.category = category
self.step_type = step_type
self.stage = stage
self.upload_state = upload_state
self.source_code_location = source_code_location
self.size_in_bytes = size_in_bytes
self.download_location = download_location
self.data_location = data_location
self.scripting_runtime_id = scripting_runtime_id
self.interface_documentation = interface_documentation
self.is_eyes_on = is_eyes_on
self.compliance_cluster = compliance_cluster
self.is_deterministic = is_deterministic
self.information_url = information_url
self.is_experiment_id_in_parameters = is_experiment_id_in_parameters
self.interface_string = interface_string
self.default_parameters = default_parameters
self.structured_interface = structured_interface
self.family_id = family_id
self.name = name
self.hash = hash
self.description = description
self.version = version
self.sequence_number_in_family = sequence_number_in_family
self.owner = owner
self.azure_tenant_id = azure_tenant_id
self.azure_user_id = azure_user_id
self.collaborators = collaborators
self.id = id
self.workspace_id = workspace_id
self.etag = etag
self.tags = tags
self.created_date = created_date
self.last_modified_date = last_modified_date
self.entity_status = entity_status
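
# Hand-written usage sketch (not AutoRest output; every value below is an
# illustrative assumption). Constructing a minimal AetherModuleEntity and
# serializing it shows how msrest maps the snake_case constructor keywords to
# the camelCase wire keys declared in _attribute_map; unset (None) fields are
# omitted from the payload:
#
#     entity = AetherModuleEntity(
#         name="my-module",                    # hypothetical module name
#         version="0.0.1",
#         module_type="BatchInferencing",      # a value from AetherModuleType
#         entity_status="Active",              # a value from AetherEntityStatus
#     )
#     entity.serialize()
#     # -> {"name": "my-module", "version": "0.0.1",
#     #     "moduleType": "BatchInferencing", "entityStatus": "Active"}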
class AetherModuleExtendedProperties(msrest.serialization.Model):
"""AetherModuleExtendedProperties.
:ivar auto_deployed_artifact:
:vartype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
:ivar script_needs_approval:
:vartype script_needs_approval: bool
"""
_attribute_map = {
'auto_deployed_artifact': {'key': 'autoDeployedArtifact', 'type': 'AetherBuildArtifactInfo'},
'script_needs_approval': {'key': 'scriptNeedsApproval', 'type': 'bool'},
}
def __init__(
self,
*,
auto_deployed_artifact: Optional["AetherBuildArtifactInfo"] = None,
script_needs_approval: Optional[bool] = None,
**kwargs
):
"""
:keyword auto_deployed_artifact:
:paramtype auto_deployed_artifact: ~flow.models.AetherBuildArtifactInfo
:keyword script_needs_approval:
:paramtype script_needs_approval: bool
"""
super(AetherModuleExtendedProperties, self).__init__(**kwargs)
self.auto_deployed_artifact = auto_deployed_artifact
self.script_needs_approval = script_needs_approval
class AetherNCrossValidations(msrest.serialization.Model):
"""AetherNCrossValidations.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherNCrossValidationMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "AetherNCrossValidationMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherNCrossValidationMode
:keyword value:
:paramtype value: int
"""
super(AetherNCrossValidations, self).__init__(**kwargs)
self.mode = mode
self.value = value
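
# Hand-written usage sketch (values are illustrative assumptions). The
# mode/value pair follows the common Aether "Auto or Custom" pattern: pass the
# enum member or its string form for ``mode`` and the fold count in ``value``:
#
#     cv = AetherNCrossValidations(mode="Custom", value=5)
#     cv.serialize()  # -> {"mode": "Custom", "value": 5}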
class AetherOutputSetting(msrest.serialization.Model):
"""AetherOutputSetting.
:ivar name:
:vartype name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_name_parameter_assignment:
:vartype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar data_store_mode_parameter_assignment:
:vartype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar path_on_compute_parameter_assignment:
:vartype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar web_service_port:
:vartype web_service_port: str
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.AetherDatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:ivar parameter_name:
:vartype parameter_name: str
:ivar asset_output_settings_parameter_name:
:vartype asset_output_settings_parameter_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'AetherParameterAssignment'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'AetherParameterAssignment'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'AetherParameterAssignment'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'AetherDatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'AetherDatasetOutputOptions'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
data_store_name: Optional[str] = None,
data_store_name_parameter_assignment: Optional["AetherParameterAssignment"] = None,
data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
data_store_mode_parameter_assignment: Optional["AetherParameterAssignment"] = None,
path_on_compute: Optional[str] = None,
path_on_compute_parameter_assignment: Optional["AetherParameterAssignment"] = None,
overwrite: Optional[bool] = None,
data_reference_name: Optional[str] = None,
web_service_port: Optional[str] = None,
dataset_registration: Optional["AetherDatasetRegistration"] = None,
dataset_output_options: Optional["AetherDatasetOutputOptions"] = None,
asset_output_settings: Optional["AetherAssetOutputSettings"] = None,
parameter_name: Optional[str] = None,
asset_output_settings_parameter_name: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_name_parameter_assignment:
:paramtype data_store_name_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword data_store_mode_parameter_assignment:
:paramtype data_store_mode_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword path_on_compute_parameter_assignment:
:paramtype path_on_compute_parameter_assignment: ~flow.models.AetherParameterAssignment
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword web_service_port:
:paramtype web_service_port: str
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.AetherDatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.AetherDatasetOutputOptions
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:keyword parameter_name:
:paramtype parameter_name: str
:keyword asset_output_settings_parameter_name:
:paramtype asset_output_settings_parameter_name: str
"""
super(AetherOutputSetting, self).__init__(**kwargs)
self.name = name
self.data_store_name = data_store_name
self.data_store_name_parameter_assignment = data_store_name_parameter_assignment
self.data_store_mode = data_store_mode
self.data_store_mode_parameter_assignment = data_store_mode_parameter_assignment
self.path_on_compute = path_on_compute
self.path_on_compute_parameter_assignment = path_on_compute_parameter_assignment
self.overwrite = overwrite
self.data_reference_name = data_reference_name
self.web_service_port = web_service_port
self.dataset_registration = dataset_registration
self.dataset_output_options = dataset_output_options
self.asset_output_settings = asset_output_settings
self.parameter_name = parameter_name
self.asset_output_settings_parameter_name = asset_output_settings_parameter_name
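
# Hand-written usage sketch (the datastore name and mode are illustrative
# assumptions). A typical output setting points a named output at a datastore
# and picks one of the AetherDataStoreMode values:
#
#     output = AetherOutputSetting(
#         name="scored_data",                  # hypothetical output name
#         data_store_name="workspaceblobstore",
#         data_store_mode="Mount",
#         overwrite=True,
#     )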
class AetherParallelForControlFlowInfo(msrest.serialization.Model):
"""AetherParallelForControlFlowInfo.
:ivar parallel_for_items_input:
:vartype parallel_for_items_input: ~flow.models.AetherParameterAssignment
"""
_attribute_map = {
'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'AetherParameterAssignment'},
}
def __init__(
self,
*,
parallel_for_items_input: Optional["AetherParameterAssignment"] = None,
**kwargs
):
"""
:keyword parallel_for_items_input:
:paramtype parallel_for_items_input: ~flow.models.AetherParameterAssignment
"""
super(AetherParallelForControlFlowInfo, self).__init__(**kwargs)
self.parallel_for_items_input = parallel_for_items_input
class AetherParameterAssignment(msrest.serialization.Model):
"""AetherParameterAssignment.
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.AetherParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.AetherLegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[AetherParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'AetherLegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'AetherDataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
value_type: Optional[Union[str, "AetherParameterValueType"]] = None,
assignments_to_concatenate: Optional[List["AetherParameterAssignment"]] = None,
data_path_assignment: Optional["AetherLegacyDataPath"] = None,
data_set_definition_value_assignment: Optional["AetherDataSetDefinitionValue"] = None,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.AetherParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.AetherParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.AetherLegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.AetherDataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(AetherParameterAssignment, self).__init__(**kwargs)
self.value_type = value_type
self.assignments_to_concatenate = assignments_to_concatenate
self.data_path_assignment = data_path_assignment
self.data_set_definition_value_assignment = data_set_definition_value_assignment
self.name = name
self.value = value
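
# Hand-written usage sketch (names and values are illustrative assumptions).
# Because assignments_to_concatenate holds further AetherParameterAssignment
# objects, "Concatenate" assignments nest recursively, e.g. a literal prefix
# joined to a graph parameter:
#
#     prefix = AetherParameterAssignment(value_type="Literal", value="run-")
#     run_id = AetherParameterAssignment(value_type="GraphParameterName", value="RunId")
#     joined = AetherParameterAssignment(
#         value_type="Concatenate",
#         assignments_to_concatenate=[prefix, run_id],
#     )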
class AetherPhillyHdfsReference(msrest.serialization.Model):
"""AetherPhillyHdfsReference.
:ivar cluster:
:vartype cluster: str
:ivar vc:
:vartype vc: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'cluster': {'key': 'cluster', 'type': 'str'},
'vc': {'key': 'vc', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
cluster: Optional[str] = None,
vc: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword cluster:
:paramtype cluster: str
:keyword vc:
:paramtype vc: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AetherPhillyHdfsReference, self).__init__(**kwargs)
self.cluster = cluster
self.vc = vc
self.relative_path = relative_path
class AetherPortInfo(msrest.serialization.Model):
"""AetherPortInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar graph_port_name:
:vartype graph_port_name: str
:ivar is_parameter:
:vartype is_parameter: bool
:ivar web_service_port:
:vartype web_service_port: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
'is_parameter': {'key': 'isParameter', 'type': 'bool'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
port_name: Optional[str] = None,
graph_port_name: Optional[str] = None,
is_parameter: Optional[bool] = None,
web_service_port: Optional[str] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword graph_port_name:
:paramtype graph_port_name: str
:keyword is_parameter:
:paramtype is_parameter: bool
:keyword web_service_port:
:paramtype web_service_port: str
"""
super(AetherPortInfo, self).__init__(**kwargs)
self.node_id = node_id
self.port_name = port_name
self.graph_port_name = graph_port_name
self.is_parameter = is_parameter
self.web_service_port = web_service_port
class AetherPriorityConfig(msrest.serialization.Model):
"""AetherPriorityConfig.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
*,
job_priority: Optional[int] = None,
is_preemptible: Optional[bool] = None,
node_count_set: Optional[List[int]] = None,
scale_interval: Optional[int] = None,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(AetherPriorityConfig, self).__init__(**kwargs)
self.job_priority = job_priority
self.is_preemptible = is_preemptible
self.node_count_set = node_count_set
self.scale_interval = scale_interval
class AetherPriorityConfiguration(msrest.serialization.Model):
"""AetherPriorityConfiguration.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar string_type_priority:
:vartype string_type_priority: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
}
def __init__(
self,
*,
cloud_priority: Optional[int] = None,
string_type_priority: Optional[str] = None,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword string_type_priority:
:paramtype string_type_priority: str
"""
super(AetherPriorityConfiguration, self).__init__(**kwargs)
self.cloud_priority = cloud_priority
self.string_type_priority = string_type_priority
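
# Hand-written usage sketch (the payload is an illustrative assumption). Going
# the other direction, msrest models expose a ``deserialize`` classmethod that
# builds an instance from a wire-format dict:
#
#     raw = {"cloudPriority": 100, "stringTypePriority": "High"}
#     cfg = AetherPriorityConfiguration.deserialize(raw)
#     cfg.cloud_priority  # 100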
class AetherRegisteredDataSetReference(msrest.serialization.Model):
"""AetherRegisteredDataSetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AetherRegisteredDataSetReference, self).__init__(**kwargs)
self.id = id
self.name = name
self.version = version
class AetherRemoteDockerComputeInfo(msrest.serialization.Model):
"""AetherRemoteDockerComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
*,
address: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
private_key: Optional[str] = None,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(AetherRemoteDockerComputeInfo, self).__init__(**kwargs)
self.address = address
self.username = username
self.password = password
self.private_key = private_key
class AetherResourceAssignment(msrest.serialization.Model):
"""AetherResourceAssignment.
:ivar attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
:vartype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
"""
_attribute_map = {
'attributes': {'key': 'attributes', 'type': '{AetherResourceAttributeAssignment}'},
}
def __init__(
self,
*,
attributes: Optional[Dict[str, "AetherResourceAttributeAssignment"]] = None,
**kwargs
):
"""
:keyword attributes: Dictionary of :code:`<AetherResourceAttributeAssignment>`.
:paramtype attributes: dict[str, ~flow.models.AetherResourceAttributeAssignment]
"""
super(AetherResourceAssignment, self).__init__(**kwargs)
self.attributes = attributes
class AetherResourceAttributeAssignment(msrest.serialization.Model):
"""AetherResourceAttributeAssignment.
:ivar attribute:
:vartype attribute: ~flow.models.AetherResourceAttributeDefinition
:ivar operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
:vartype operator: str or ~flow.models.AetherResourceOperator
:ivar value:
:vartype value: str
"""
_attribute_map = {
'attribute': {'key': 'attribute', 'type': 'AetherResourceAttributeDefinition'},
'operator': {'key': 'operator', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
attribute: Optional["AetherResourceAttributeDefinition"] = None,
operator: Optional[Union[str, "AetherResourceOperator"]] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword attribute:
:paramtype attribute: ~flow.models.AetherResourceAttributeDefinition
:keyword operator: Possible values include: "Equal", "Contain", "GreaterOrEqual".
:paramtype operator: str or ~flow.models.AetherResourceOperator
:keyword value:
:paramtype value: str
"""
super(AetherResourceAttributeAssignment, self).__init__(**kwargs)
self.attribute = attribute
self.operator = operator
self.value = value
class AetherResourceAttributeDefinition(msrest.serialization.Model):
"""AetherResourceAttributeDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "String", "Double".
:vartype type: str or ~flow.models.AetherResourceValueType
:ivar units:
:vartype units: str
:ivar allowed_operators:
:vartype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
"""
_validation = {
'allowed_operators': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'units': {'key': 'units', 'type': 'str'},
'allowed_operators': {'key': 'allowedOperators', 'type': '[str]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[Union[str, "AetherResourceValueType"]] = None,
units: Optional[str] = None,
allowed_operators: Optional[List[Union[str, "AetherResourceOperator"]]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "String", "Double".
:paramtype type: str or ~flow.models.AetherResourceValueType
:keyword units:
:paramtype units: str
:keyword allowed_operators:
:paramtype allowed_operators: list[str or ~flow.models.AetherResourceOperator]
"""
super(AetherResourceAttributeDefinition, self).__init__(**kwargs)
self.name = name
self.type = type
self.units = units
self.allowed_operators = allowed_operators
class AetherResourceConfig(msrest.serialization.Model):
"""AetherResourceConfig.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
*,
gpu_count: Optional[int] = None,
cpu_count: Optional[int] = None,
memory_request_in_gb: Optional[int] = None,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(AetherResourceConfig, self).__init__(**kwargs)
self.gpu_count = gpu_count
self.cpu_count = cpu_count
self.memory_request_in_gb = memory_request_in_gb
class AetherResourceConfiguration(msrest.serialization.Model):
"""AetherResourceConfiguration.
:ivar instance_count:
:vartype instance_count: int
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar locations:
:vartype locations: list[str]
:ivar instance_priority:
:vartype instance_priority: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
"""
_attribute_map = {
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'locations': {'key': 'locations', 'type': '[str]'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
}
def __init__(
self,
*,
instance_count: Optional[int] = None,
instance_type: Optional[str] = None,
properties: Optional[Dict[str, Any]] = None,
locations: Optional[List[str]] = None,
instance_priority: Optional[str] = None,
quota_enforcement_resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword instance_count:
:paramtype instance_count: int
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword locations:
:paramtype locations: list[str]
:keyword instance_priority:
:paramtype instance_priority: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
"""
super(AetherResourceConfiguration, self).__init__(**kwargs)
self.instance_count = instance_count
self.instance_type = instance_type
self.properties = properties
self.locations = locations
self.instance_priority = instance_priority
self.quota_enforcement_resource_id = quota_enforcement_resource_id
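
# Hand-written usage sketch (the SKU and property bag are illustrative
# assumptions). ``properties`` is an open dict[str, any] that is passed
# through to the service unchanged:
#
#     resources = AetherResourceConfiguration(
#         instance_count=2,
#         instance_type="STANDARD_D2_V2",           # hypothetical VM SKU
#         properties={"enableDetailedLogs": True},  # hypothetical key
#     )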
class AetherResourceModel(msrest.serialization.Model):
"""AetherResourceModel.
:ivar resources:
:vartype resources: list[~flow.models.AetherResourceAssignment]
"""
_attribute_map = {
'resources': {'key': 'resources', 'type': '[AetherResourceAssignment]'},
}
def __init__(
self,
*,
resources: Optional[List["AetherResourceAssignment"]] = None,
**kwargs
):
"""
:keyword resources:
:paramtype resources: list[~flow.models.AetherResourceAssignment]
"""
super(AetherResourceModel, self).__init__(**kwargs)
self.resources = resources
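
# Hand-written usage sketch (the attribute name, operator, and values are
# illustrative assumptions). The resource-requirement types compose from the
# bottom up: a definition describes an attribute, an assignment constrains it,
# and AetherResourceModel collects the assignments:
#
#     gpus = AetherResourceAttributeDefinition(name="gpuCount", type="Double", units="count")
#     at_least_one = AetherResourceAttributeAssignment(
#         attribute=gpus, operator="GreaterOrEqual", value="1",
#     )
#     requirements = AetherResourceModel(
#         resources=[AetherResourceAssignment(attributes={"gpuCount": at_least_one})],
#     )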
class AetherResourcesSetting(msrest.serialization.Model):
"""AetherResourcesSetting.
:ivar instance_size:
:vartype instance_size: str
:ivar spark_version:
:vartype spark_version: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
}
def __init__(
self,
*,
instance_size: Optional[str] = None,
spark_version: Optional[str] = None,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword spark_version:
:paramtype spark_version: str
"""
super(AetherResourcesSetting, self).__init__(**kwargs)
self.instance_size = instance_size
self.spark_version = spark_version
class AetherSavedDataSetReference(msrest.serialization.Model):
"""AetherSavedDataSetReference.
:ivar id:
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
"""
super(AetherSavedDataSetReference, self).__init__(**kwargs)
self.id = id
class AetherScopeCloudConfiguration(msrest.serialization.Model):
"""AetherScopeCloudConfiguration.
:ivar input_path_suffixes: This is a dictionary.
:vartype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:ivar output_path_suffixes: This is a dictionary.
:vartype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:ivar user_alias:
:vartype user_alias: str
:ivar tokens:
:vartype tokens: int
:ivar auto_token:
:vartype auto_token: int
:ivar vcp:
:vartype vcp: float
"""
_attribute_map = {
'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{AetherArgumentAssignment}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'tokens': {'key': 'tokens', 'type': 'int'},
'auto_token': {'key': 'autoToken', 'type': 'int'},
'vcp': {'key': 'vcp', 'type': 'float'},
}
def __init__(
self,
*,
input_path_suffixes: Optional[Dict[str, "AetherArgumentAssignment"]] = None,
output_path_suffixes: Optional[Dict[str, "AetherArgumentAssignment"]] = None,
user_alias: Optional[str] = None,
tokens: Optional[int] = None,
auto_token: Optional[int] = None,
vcp: Optional[float] = None,
**kwargs
):
"""
:keyword input_path_suffixes: This is a dictionary.
:paramtype input_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:keyword output_path_suffixes: This is a dictionary.
:paramtype output_path_suffixes: dict[str, ~flow.models.AetherArgumentAssignment]
:keyword user_alias:
:paramtype user_alias: str
:keyword tokens:
:paramtype tokens: int
:keyword auto_token:
:paramtype auto_token: int
:keyword vcp:
:paramtype vcp: float
"""
super(AetherScopeCloudConfiguration, self).__init__(**kwargs)
self.input_path_suffixes = input_path_suffixes
self.output_path_suffixes = output_path_suffixes
self.user_alias = user_alias
self.tokens = tokens
self.auto_token = auto_token
self.vcp = vcp
class AetherSeasonality(msrest.serialization.Model):
"""AetherSeasonality.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherSeasonalityMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "AetherSeasonalityMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherSeasonalityMode
:keyword value:
:paramtype value: int
"""
super(AetherSeasonality, self).__init__(**kwargs)
self.mode = mode
self.value = value
class AetherSqlDataPath(msrest.serialization.Model):
"""AetherSqlDataPath.
:ivar sql_table_name:
:vartype sql_table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar sql_stored_procedure_name:
:vartype sql_stored_procedure_name: str
:ivar sql_stored_procedure_params:
:vartype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
"""
_attribute_map = {
'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[AetherStoredProcedureParameter]'},
}
def __init__(
self,
*,
sql_table_name: Optional[str] = None,
sql_query: Optional[str] = None,
sql_stored_procedure_name: Optional[str] = None,
sql_stored_procedure_params: Optional[List["AetherStoredProcedureParameter"]] = None,
**kwargs
):
"""
:keyword sql_table_name:
:paramtype sql_table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword sql_stored_procedure_name:
:paramtype sql_stored_procedure_name: str
:keyword sql_stored_procedure_params:
:paramtype sql_stored_procedure_params: list[~flow.models.AetherStoredProcedureParameter]
"""
super(AetherSqlDataPath, self).__init__(**kwargs)
self.sql_table_name = sql_table_name
self.sql_query = sql_query
self.sql_stored_procedure_name = sql_stored_procedure_name
self.sql_stored_procedure_params = sql_stored_procedure_params
class AetherStackEnsembleSettings(msrest.serialization.Model):
"""AetherStackEnsembleSettings.
:ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:vartype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
:ivar stack_meta_learner_train_percentage:
:vartype stack_meta_learner_train_percentage: float
:ivar stack_meta_learner_k_wargs: Anything.
:vartype stack_meta_learner_k_wargs: any
"""
_attribute_map = {
'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
}
def __init__(
self,
*,
stack_meta_learner_type: Optional[Union[str, "AetherStackMetaLearnerType"]] = None,
stack_meta_learner_train_percentage: Optional[float] = None,
stack_meta_learner_k_wargs: Optional[Any] = None,
**kwargs
):
"""
:keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:paramtype stack_meta_learner_type: str or ~flow.models.AetherStackMetaLearnerType
:keyword stack_meta_learner_train_percentage:
:paramtype stack_meta_learner_train_percentage: float
:keyword stack_meta_learner_k_wargs: Anything.
:paramtype stack_meta_learner_k_wargs: any
"""
super(AetherStackEnsembleSettings, self).__init__(**kwargs)
self.stack_meta_learner_type = stack_meta_learner_type
self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage
self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs
class AetherStoredProcedureParameter(msrest.serialization.Model):
"""AetherStoredProcedureParameter.
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
:ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:vartype type: str or ~flow.models.AetherStoredProcedureParameterType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
type: Optional[Union[str, "AetherStoredProcedureParameterType"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
:keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:paramtype type: str or ~flow.models.AetherStoredProcedureParameterType
"""
super(AetherStoredProcedureParameter, self).__init__(**kwargs)
self.name = name
self.value = value
self.type = type
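
# Hand-written usage sketch (the procedure and parameter names are
# illustrative assumptions). Stored-procedure parameters plug into the
# AetherSqlDataPath defined above; ``type`` tags how the string ``value``
# should be interpreted:
#
#     threshold = AetherStoredProcedureParameter(name="threshold", value="0.5", type="Decimal")
#     data_path = AetherSqlDataPath(
#         sql_stored_procedure_name="usp_get_scores",  # hypothetical procedure
#         sql_stored_procedure_params=[threshold],
#     )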
class AetherStructuredInterface(msrest.serialization.Model):
"""AetherStructuredInterface.
:ivar command_line_pattern:
:vartype command_line_pattern: str
:ivar inputs:
:vartype inputs: list[~flow.models.AetherStructuredInterfaceInput]
:ivar outputs:
:vartype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.AetherControlOutput]
:ivar parameters:
:vartype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:ivar arguments:
:vartype arguments: list[~flow.models.AetherArgumentAssignment]
"""
_attribute_map = {
'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '[AetherStructuredInterfaceInput]'},
'outputs': {'key': 'outputs', 'type': '[AetherStructuredInterfaceOutput]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[AetherControlOutput]'},
'parameters': {'key': 'parameters', 'type': '[AetherStructuredInterfaceParameter]'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[AetherStructuredInterfaceParameter]'},
'arguments': {'key': 'arguments', 'type': '[AetherArgumentAssignment]'},
}
def __init__(
self,
*,
command_line_pattern: Optional[str] = None,
inputs: Optional[List["AetherStructuredInterfaceInput"]] = None,
outputs: Optional[List["AetherStructuredInterfaceOutput"]] = None,
control_outputs: Optional[List["AetherControlOutput"]] = None,
parameters: Optional[List["AetherStructuredInterfaceParameter"]] = None,
metadata_parameters: Optional[List["AetherStructuredInterfaceParameter"]] = None,
arguments: Optional[List["AetherArgumentAssignment"]] = None,
**kwargs
):
"""
:keyword command_line_pattern:
:paramtype command_line_pattern: str
:keyword inputs:
:paramtype inputs: list[~flow.models.AetherStructuredInterfaceInput]
:keyword outputs:
:paramtype outputs: list[~flow.models.AetherStructuredInterfaceOutput]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.AetherControlOutput]
:keyword parameters:
:paramtype parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.AetherStructuredInterfaceParameter]
:keyword arguments:
:paramtype arguments: list[~flow.models.AetherArgumentAssignment]
"""
super(AetherStructuredInterface, self).__init__(**kwargs)
self.command_line_pattern = command_line_pattern
self.inputs = inputs
self.outputs = outputs
self.control_outputs = control_outputs
self.parameters = parameters
self.metadata_parameters = metadata_parameters
self.arguments = arguments
class AetherStructuredInterfaceInput(msrest.serialization.Model):
"""AetherStructuredInterfaceInput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_ids_list:
:vartype data_type_ids_list: list[str]
:ivar is_optional:
:vartype is_optional: bool
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_resource:
:vartype is_resource: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar dataset_types:
:vartype dataset_types: list[str or ~flow.models.AetherDatasetType]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_validation = {
'dataset_types': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_resource': {'key': 'isResource', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
data_type_ids_list: Optional[List[str]] = None,
is_optional: Optional[bool] = None,
description: Optional[str] = None,
skip_processing: Optional[bool] = None,
is_resource: Optional[bool] = None,
data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
data_reference_name: Optional[str] = None,
dataset_types: Optional[List[Union[str, "AetherDatasetType"]]] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_ids_list:
:paramtype data_type_ids_list: list[str]
:keyword is_optional:
:paramtype is_optional: bool
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_resource:
:paramtype is_resource: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword dataset_types:
:paramtype dataset_types: list[str or ~flow.models.AetherDatasetType]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AetherStructuredInterfaceInput, self).__init__(**kwargs)
self.name = name
self.label = label
self.data_type_ids_list = data_type_ids_list
self.is_optional = is_optional
self.description = description
self.skip_processing = skip_processing
self.is_resource = is_resource
self.data_store_mode = data_store_mode
self.path_on_compute = path_on_compute
self.overwrite = overwrite
self.data_reference_name = data_reference_name
self.dataset_types = dataset_types
self.additional_transformations = additional_transformations
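
# Hand-written usage sketch (the names and data type id are illustrative
# assumptions). Inputs are declared individually and then attached to an
# AetherStructuredInterface (defined above) alongside outputs and parameters:
#
#     training_data = AetherStructuredInterfaceInput(
#         name="training_data",
#         is_optional=False,
#         data_store_mode="Mount",
#         data_type_ids_list=["AnyFile"],      # hypothetical data type id
#     )
#     interface = AetherStructuredInterface(inputs=[training_data])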
class AetherStructuredInterfaceOutput(msrest.serialization.Model):
"""AetherStructuredInterfaceOutput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_data_type_input_name:
:vartype pass_through_data_type_input_name: str
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_artifact:
:vartype is_artifact: bool
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AetherDataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar training_output:
:vartype training_output: ~flow.models.AetherTrainingOutput
:ivar dataset_output:
:vartype dataset_output: ~flow.models.AetherDatasetOutput
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:ivar early_available:
:vartype early_available: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_artifact': {'key': 'isArtifact', 'type': 'bool'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'training_output': {'key': 'trainingOutput', 'type': 'AetherTrainingOutput'},
'dataset_output': {'key': 'datasetOutput', 'type': 'AetherDatasetOutput'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AetherAssetOutputSettings'},
'early_available': {'key': 'earlyAvailable', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
data_type_id: Optional[str] = None,
pass_through_data_type_input_name: Optional[str] = None,
description: Optional[str] = None,
skip_processing: Optional[bool] = None,
is_artifact: Optional[bool] = None,
data_store_name: Optional[str] = None,
data_store_mode: Optional[Union[str, "AetherDataStoreMode"]] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
data_reference_name: Optional[str] = None,
training_output: Optional["AetherTrainingOutput"] = None,
dataset_output: Optional["AetherDatasetOutput"] = None,
asset_output_settings: Optional["AetherAssetOutputSettings"] = None,
early_available: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_data_type_input_name:
:paramtype pass_through_data_type_input_name: str
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_artifact:
:paramtype is_artifact: bool
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AetherDataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword training_output:
:paramtype training_output: ~flow.models.AetherTrainingOutput
:keyword dataset_output:
:paramtype dataset_output: ~flow.models.AetherDatasetOutput
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AetherAssetOutputSettings
:keyword early_available:
:paramtype early_available: bool
"""
super(AetherStructuredInterfaceOutput, self).__init__(**kwargs)
self.name = name
self.label = label
self.data_type_id = data_type_id
self.pass_through_data_type_input_name = pass_through_data_type_input_name
self.description = description
self.skip_processing = skip_processing
self.is_artifact = is_artifact
self.data_store_name = data_store_name
self.data_store_mode = data_store_mode
self.path_on_compute = path_on_compute
self.overwrite = overwrite
self.data_reference_name = data_reference_name
self.training_output = training_output
self.dataset_output = dataset_output
self.asset_output_settings = asset_output_settings
self.early_available = early_available
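
# Illustrative sketch (editor's note, not generated code): constructing an output
# definition and serializing it. The values are made-up placeholders; ``serialize()``
# is inherited from ``msrest.serialization.Model`` and emits the wire-format keys
# declared in ``_attribute_map`` (e.g. ``dataStoreMode``).
#
#     output = AetherStructuredInterfaceOutput(
#         name="scored_data",
#         data_type_id="AnyDirectory",
#         data_store_name="workspaceblobstore",
#         data_store_mode="Mount",
#     )
#     wire = output.serialize()  # {'name': 'scored_data', 'dataStoreMode': 'Mount', ...}
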
class AetherStructuredInterfaceParameter(msrest.serialization.Model):
"""AetherStructuredInterfaceParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype parameter_type: str or ~flow.models.AetherParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar enum_values:
:vartype enum_values: list[str]
    :ivar enum_values_to_argument_strings: Dictionary mapping each enum value to the argument
     string emitted for it.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar description:
:vartype description: str
:ivar set_environment_variable:
:vartype set_environment_variable: bool
:ivar environment_variable_override:
:vartype environment_variable_override: str
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar ui_hint:
:vartype ui_hint: ~flow.models.AetherUIParameterHint
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'ui_hint': {'key': 'uiHint', 'type': 'AetherUIParameterHint'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
parameter_type: Optional[Union[str, "AetherParameterType"]] = None,
is_optional: Optional[bool] = None,
default_value: Optional[str] = None,
lower_bound: Optional[str] = None,
upper_bound: Optional[str] = None,
enum_values: Optional[List[str]] = None,
enum_values_to_argument_strings: Optional[Dict[str, str]] = None,
description: Optional[str] = None,
set_environment_variable: Optional[bool] = None,
environment_variable_override: Optional[str] = None,
enabled_by_parameter_name: Optional[str] = None,
enabled_by_parameter_values: Optional[List[str]] = None,
ui_hint: Optional["AetherUIParameterHint"] = None,
group_names: Optional[List[str]] = None,
argument_name: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
"Undefined".
:paramtype parameter_type: str or ~flow.models.AetherParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword enum_values:
:paramtype enum_values: list[str]
    :keyword enum_values_to_argument_strings: Dictionary mapping each enum value to the argument
     string emitted for it.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword description:
:paramtype description: str
:keyword set_environment_variable:
:paramtype set_environment_variable: bool
:keyword environment_variable_override:
:paramtype environment_variable_override: str
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.AetherUIParameterHint
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
"""
super(AetherStructuredInterfaceParameter, self).__init__(**kwargs)
self.name = name
self.label = label
self.parameter_type = parameter_type
self.is_optional = is_optional
self.default_value = default_value
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.enum_values = enum_values
self.enum_values_to_argument_strings = enum_values_to_argument_strings
self.description = description
self.set_environment_variable = set_environment_variable
self.environment_variable_override = environment_variable_override
self.enabled_by_parameter_name = enabled_by_parameter_name
self.enabled_by_parameter_values = enabled_by_parameter_values
self.ui_hint = ui_hint
self.group_names = group_names
self.argument_name = argument_name
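
# Illustrative sketch (editor's note): a numeric hyperparameter declaration. Note that
# ``default_value``, ``lower_bound`` and ``upper_bound`` are strings in this contract
# even for numeric parameter types; the names and values below are placeholders.
#
#     learning_rate = AetherStructuredInterfaceParameter(
#         name="learning_rate",
#         parameter_type="Double",
#         is_optional=True,
#         default_value="0.01",
#         lower_bound="0.0001",
#         upper_bound="1.0",
#         argument_name="--learning-rate",
#     )
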
class AetherSubGraphConfiguration(msrest.serialization.Model):
"""AetherSubGraphConfiguration.
:ivar graph_id:
:vartype graph_id: str
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar default_compute_internal:
:vartype default_compute_internal: ~flow.models.AetherComputeSetting
:ivar default_datastore_internal:
:vartype default_datastore_internal: ~flow.models.AetherDatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:ivar user_alias:
:vartype user_alias: str
:ivar is_dynamic:
:vartype is_dynamic: bool
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'default_compute_internal': {'key': 'defaultComputeInternal', 'type': 'AetherComputeSetting'},
'default_datastore_internal': {'key': 'defaultDatastoreInternal', 'type': 'AetherDatastoreSetting'},
'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'AetherCloudPrioritySetting'},
'user_alias': {'key': 'UserAlias', 'type': 'str'},
'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
}
def __init__(
self,
*,
graph_id: Optional[str] = None,
graph_draft_id: Optional[str] = None,
default_compute_internal: Optional["AetherComputeSetting"] = None,
default_datastore_internal: Optional["AetherDatastoreSetting"] = None,
default_cloud_priority: Optional["AetherCloudPrioritySetting"] = None,
user_alias: Optional[str] = None,
is_dynamic: Optional[bool] = False,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword default_compute_internal:
:paramtype default_compute_internal: ~flow.models.AetherComputeSetting
:keyword default_datastore_internal:
:paramtype default_datastore_internal: ~flow.models.AetherDatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.AetherCloudPrioritySetting
:keyword user_alias:
:paramtype user_alias: str
:keyword is_dynamic:
:paramtype is_dynamic: bool
"""
super(AetherSubGraphConfiguration, self).__init__(**kwargs)
self.graph_id = graph_id
self.graph_draft_id = graph_draft_id
self.default_compute_internal = default_compute_internal
self.default_datastore_internal = default_datastore_internal
self.default_cloud_priority = default_cloud_priority
self.user_alias = user_alias
self.is_dynamic = is_dynamic
class AetherSweepEarlyTerminationPolicy(msrest.serialization.Model):
"""AetherSweepEarlyTerminationPolicy.
:ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType
:ivar evaluation_interval:
:vartype evaluation_interval: int
:ivar delay_evaluation:
:vartype delay_evaluation: int
:ivar slack_factor:
:vartype slack_factor: float
:ivar slack_amount:
:vartype slack_amount: float
:ivar truncation_percentage:
:vartype truncation_percentage: int
"""
_attribute_map = {
'policy_type': {'key': 'policyType', 'type': 'str'},
'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
'slack_factor': {'key': 'slackFactor', 'type': 'float'},
'slack_amount': {'key': 'slackAmount', 'type': 'float'},
'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
}
def __init__(
self,
*,
policy_type: Optional[Union[str, "AetherEarlyTerminationPolicyType"]] = None,
evaluation_interval: Optional[int] = None,
delay_evaluation: Optional[int] = None,
slack_factor: Optional[float] = None,
slack_amount: Optional[float] = None,
truncation_percentage: Optional[int] = None,
**kwargs
):
"""
:keyword policy_type: Possible values include: "Bandit", "MedianStopping",
"TruncationSelection".
:paramtype policy_type: str or ~flow.models.AetherEarlyTerminationPolicyType
:keyword evaluation_interval:
:paramtype evaluation_interval: int
:keyword delay_evaluation:
:paramtype delay_evaluation: int
:keyword slack_factor:
:paramtype slack_factor: float
:keyword slack_amount:
:paramtype slack_amount: float
:keyword truncation_percentage:
:paramtype truncation_percentage: int
"""
super(AetherSweepEarlyTerminationPolicy, self).__init__(**kwargs)
self.policy_type = policy_type
self.evaluation_interval = evaluation_interval
self.delay_evaluation = delay_evaluation
self.slack_factor = slack_factor
self.slack_amount = slack_amount
self.truncation_percentage = truncation_percentage
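
# Illustrative sketch (editor's note): a Bandit-style early-termination policy. By
# analogy with Azure ML sweep jobs, ``slack_factor``/``slack_amount`` would apply to
# "Bandit" and ``truncation_percentage`` to "TruncationSelection"; that pairing is an
# assumption, not something this generated model enforces.
#
#     policy = AetherSweepEarlyTerminationPolicy(
#         policy_type="Bandit",
#         evaluation_interval=1,
#         delay_evaluation=5,
#         slack_factor=0.2,
#     )
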
class AetherSweepSettings(msrest.serialization.Model):
"""AetherSweepSettings.
:ivar limits:
:vartype limits: ~flow.models.AetherSweepSettingsLimits
:ivar search_space:
:vartype search_space: list[dict[str, str]]
:ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:vartype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType
:ivar early_termination:
:vartype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy
"""
_attribute_map = {
'limits': {'key': 'limits', 'type': 'AetherSweepSettingsLimits'},
'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
'early_termination': {'key': 'earlyTermination', 'type': 'AetherSweepEarlyTerminationPolicy'},
}
def __init__(
self,
*,
limits: Optional["AetherSweepSettingsLimits"] = None,
search_space: Optional[List[Dict[str, str]]] = None,
sampling_algorithm: Optional[Union[str, "AetherSamplingAlgorithmType"]] = None,
early_termination: Optional["AetherSweepEarlyTerminationPolicy"] = None,
**kwargs
):
"""
:keyword limits:
:paramtype limits: ~flow.models.AetherSweepSettingsLimits
:keyword search_space:
:paramtype search_space: list[dict[str, str]]
:keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:paramtype sampling_algorithm: str or ~flow.models.AetherSamplingAlgorithmType
:keyword early_termination:
:paramtype early_termination: ~flow.models.AetherSweepEarlyTerminationPolicy
"""
super(AetherSweepSettings, self).__init__(**kwargs)
self.limits = limits
self.search_space = search_space
self.sampling_algorithm = sampling_algorithm
self.early_termination = early_termination
class AetherSweepSettingsLimits(msrest.serialization.Model):
"""AetherSweepSettingsLimits.
:ivar max_total_trials:
:vartype max_total_trials: int
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
"""
_attribute_map = {
'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
}
def __init__(
self,
*,
max_total_trials: Optional[int] = None,
max_concurrent_trials: Optional[int] = None,
**kwargs
):
"""
:keyword max_total_trials:
:paramtype max_total_trials: int
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
"""
super(AetherSweepSettingsLimits, self).__init__(**kwargs)
self.max_total_trials = max_total_trials
self.max_concurrent_trials = max_concurrent_trials
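
# Illustrative sketch (editor's note): composing a full sweep configuration from the
# three models above. The search-space expression syntax is assumed to follow Azure ML
# sweep conventions and is shown only as a placeholder.
#
#     sweep = AetherSweepSettings(
#         limits=AetherSweepSettingsLimits(max_total_trials=20, max_concurrent_trials=4),
#         search_space=[{"learning_rate": "uniform(0.0001, 1.0)"}],
#         sampling_algorithm="Random",
#         early_termination=AetherSweepEarlyTerminationPolicy(
#             policy_type="Bandit", evaluation_interval=1, slack_factor=0.2,
#         ),
#     )
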
class AetherTargetLags(msrest.serialization.Model):
"""AetherTargetLags.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherTargetLagsMode
:ivar values:
:vartype values: list[int]
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "AetherTargetLagsMode"]] = None,
values: Optional[List[int]] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherTargetLagsMode
:keyword values:
:paramtype values: list[int]
"""
super(AetherTargetLags, self).__init__(**kwargs)
self.mode = mode
self.values = values
class AetherTargetRollingWindowSize(msrest.serialization.Model):
"""AetherTargetRollingWindowSize.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "AetherTargetRollingWindowSizeMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.AetherTargetRollingWindowSizeMode
:keyword value:
:paramtype value: int
"""
super(AetherTargetRollingWindowSize, self).__init__(**kwargs)
self.mode = mode
self.value = value
class AetherTargetSelectorConfiguration(msrest.serialization.Model):
"""AetherTargetSelectorConfiguration.
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
:ivar compute_type:
:vartype compute_type: str
:ivar instance_type:
:vartype instance_type: list[str]
:ivar instance_types:
:vartype instance_types: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar region:
:vartype region: list[str]
:ivar regions:
:vartype regions: list[str]
:ivar vc_block_list:
:vartype vc_block_list: list[str]
"""
_attribute_map = {
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': '[str]'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'regions': {'key': 'regions', 'type': '[str]'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
}
def __init__(
self,
*,
low_priority_vm_tolerant: Optional[bool] = None,
cluster_block_list: Optional[List[str]] = None,
compute_type: Optional[str] = None,
instance_type: Optional[List[str]] = None,
instance_types: Optional[List[str]] = None,
my_resource_only: Optional[bool] = None,
plan_id: Optional[str] = None,
plan_region_id: Optional[str] = None,
region: Optional[List[str]] = None,
regions: Optional[List[str]] = None,
vc_block_list: Optional[List[str]] = None,
**kwargs
):
"""
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
:keyword compute_type:
:paramtype compute_type: str
:keyword instance_type:
:paramtype instance_type: list[str]
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword region:
:paramtype region: list[str]
:keyword regions:
:paramtype regions: list[str]
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
"""
super(AetherTargetSelectorConfiguration, self).__init__(**kwargs)
self.low_priority_vm_tolerant = low_priority_vm_tolerant
self.cluster_block_list = cluster_block_list
self.compute_type = compute_type
self.instance_type = instance_type
self.instance_types = instance_types
self.my_resource_only = my_resource_only
self.plan_id = plan_id
self.plan_region_id = plan_region_id
self.region = region
self.regions = regions
self.vc_block_list = vc_block_list
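
# Illustrative sketch (editor's note): restricting target selection to specific VM
# sizes and regions. Both singular (``instance_type``, ``region``) and plural fields
# exist in this contract and all take lists; the plural forms are used here, and the
# values are placeholders.
#
#     selector = AetherTargetSelectorConfiguration(
#         compute_type="AmlCompute",
#         instance_types=["STANDARD_D2_V2"],
#         regions=["westus2", "eastus"],
#         low_priority_vm_tolerant=True,
#     )
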
class AetherTestDataSettings(msrest.serialization.Model):
"""AetherTestDataSettings.
:ivar test_data_size:
:vartype test_data_size: float
"""
_attribute_map = {
'test_data_size': {'key': 'testDataSize', 'type': 'float'},
}
def __init__(
self,
*,
test_data_size: Optional[float] = None,
**kwargs
):
"""
:keyword test_data_size:
:paramtype test_data_size: float
"""
super(AetherTestDataSettings, self).__init__(**kwargs)
self.test_data_size = test_data_size
class AetherTorchDistributedConfiguration(msrest.serialization.Model):
"""AetherTorchDistributedConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
*,
process_count_per_node: Optional[int] = None,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(AetherTorchDistributedConfiguration, self).__init__(**kwargs)
self.process_count_per_node = process_count_per_node
class AetherTrainingOutput(msrest.serialization.Model):
"""AetherTrainingOutput.
:ivar training_output_type: Possible values include: "Metrics", "Model".
:vartype training_output_type: str or ~flow.models.AetherTrainingOutputType
:ivar iteration:
:vartype iteration: int
:ivar metric:
:vartype metric: str
:ivar model_file:
:vartype model_file: str
"""
_attribute_map = {
'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
'iteration': {'key': 'iteration', 'type': 'int'},
'metric': {'key': 'metric', 'type': 'str'},
'model_file': {'key': 'modelFile', 'type': 'str'},
}
def __init__(
self,
*,
training_output_type: Optional[Union[str, "AetherTrainingOutputType"]] = None,
iteration: Optional[int] = None,
metric: Optional[str] = None,
model_file: Optional[str] = None,
**kwargs
):
"""
:keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.AetherTrainingOutputType
:keyword iteration:
:paramtype iteration: int
:keyword metric:
:paramtype metric: str
:keyword model_file:
:paramtype model_file: str
"""
super(AetherTrainingOutput, self).__init__(**kwargs)
self.training_output_type = training_output_type
self.iteration = iteration
self.metric = metric
self.model_file = model_file
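
# Illustrative sketch (editor's note): requesting the best model (rather than metrics)
# as a training output, keyed on an assumed metric name.
#
#     best_model = AetherTrainingOutput(training_output_type="Model", metric="AUC_weighted")
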
class AetherTrainingSettings(msrest.serialization.Model):
"""AetherTrainingSettings.
:ivar block_list_models:
:vartype block_list_models: list[str]
:ivar allow_list_models:
:vartype allow_list_models: list[str]
:ivar enable_dnn_training:
:vartype enable_dnn_training: bool
:ivar enable_onnx_compatible_models:
:vartype enable_onnx_compatible_models: bool
:ivar stack_ensemble_settings:
:vartype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings
:ivar enable_stack_ensemble:
:vartype enable_stack_ensemble: bool
:ivar enable_vote_ensemble:
:vartype enable_vote_ensemble: bool
:ivar ensemble_model_download_timeout:
:vartype ensemble_model_download_timeout: str
:ivar enable_model_explainability:
:vartype enable_model_explainability: bool
:ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:vartype training_mode: str or ~flow.models.AetherTabularTrainingMode
"""
_attribute_map = {
'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'},
'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'AetherStackEnsembleSettings'},
'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'},
'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'},
'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'},
'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
'training_mode': {'key': 'trainingMode', 'type': 'str'},
}
def __init__(
self,
*,
block_list_models: Optional[List[str]] = None,
allow_list_models: Optional[List[str]] = None,
enable_dnn_training: Optional[bool] = None,
enable_onnx_compatible_models: Optional[bool] = None,
stack_ensemble_settings: Optional["AetherStackEnsembleSettings"] = None,
enable_stack_ensemble: Optional[bool] = None,
enable_vote_ensemble: Optional[bool] = None,
ensemble_model_download_timeout: Optional[str] = None,
enable_model_explainability: Optional[bool] = None,
training_mode: Optional[Union[str, "AetherTabularTrainingMode"]] = None,
**kwargs
):
"""
:keyword block_list_models:
:paramtype block_list_models: list[str]
:keyword allow_list_models:
:paramtype allow_list_models: list[str]
:keyword enable_dnn_training:
:paramtype enable_dnn_training: bool
:keyword enable_onnx_compatible_models:
:paramtype enable_onnx_compatible_models: bool
:keyword stack_ensemble_settings:
:paramtype stack_ensemble_settings: ~flow.models.AetherStackEnsembleSettings
:keyword enable_stack_ensemble:
:paramtype enable_stack_ensemble: bool
:keyword enable_vote_ensemble:
:paramtype enable_vote_ensemble: bool
:keyword ensemble_model_download_timeout:
:paramtype ensemble_model_download_timeout: str
:keyword enable_model_explainability:
:paramtype enable_model_explainability: bool
:keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:paramtype training_mode: str or ~flow.models.AetherTabularTrainingMode
"""
super(AetherTrainingSettings, self).__init__(**kwargs)
self.block_list_models = block_list_models
self.allow_list_models = allow_list_models
self.enable_dnn_training = enable_dnn_training
self.enable_onnx_compatible_models = enable_onnx_compatible_models
self.stack_ensemble_settings = stack_ensemble_settings
self.enable_stack_ensemble = enable_stack_ensemble
self.enable_vote_ensemble = enable_vote_ensemble
self.ensemble_model_download_timeout = ensemble_model_download_timeout
self.enable_model_explainability = enable_model_explainability
self.training_mode = training_mode
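
# Illustrative sketch (editor's note): typical AutoML-style training settings. The
# download timeout is serialized as a plain string; the ISO-8601 duration format shown
# here is an assumption.
#
#     training = AetherTrainingSettings(
#         enable_onnx_compatible_models=True,
#         enable_stack_ensemble=False,
#         enable_vote_ensemble=True,
#         ensemble_model_download_timeout="PT5M",
#         training_mode="Auto",
#     )
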
class AetherUIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
"""AetherUIAzureOpenAIDeploymentNameSelector.
:ivar capabilities:
:vartype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities
"""
_attribute_map = {
'capabilities': {'key': 'Capabilities', 'type': 'AetherUIAzureOpenAIModelCapabilities'},
}
def __init__(
self,
*,
capabilities: Optional["AetherUIAzureOpenAIModelCapabilities"] = None,
**kwargs
):
"""
:keyword capabilities:
:paramtype capabilities: ~flow.models.AetherUIAzureOpenAIModelCapabilities
"""
super(AetherUIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
self.capabilities = capabilities
class AetherUIAzureOpenAIModelCapabilities(msrest.serialization.Model):
"""AetherUIAzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'Completion', 'type': 'bool'},
'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
'embeddings': {'key': 'Embeddings', 'type': 'bool'},
}
def __init__(
self,
*,
completion: Optional[bool] = None,
chat_completion: Optional[bool] = None,
embeddings: Optional[bool] = None,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(AetherUIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = completion
self.chat_completion = chat_completion
self.embeddings = embeddings
class AetherUIColumnPicker(msrest.serialization.Model):
"""AetherUIColumnPicker.
:ivar column_picker_for:
:vartype column_picker_for: str
:ivar column_selection_categories:
:vartype column_selection_categories: list[str]
:ivar single_column_selection:
:vartype single_column_selection: bool
"""
_attribute_map = {
'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
}
def __init__(
self,
*,
column_picker_for: Optional[str] = None,
column_selection_categories: Optional[List[str]] = None,
single_column_selection: Optional[bool] = None,
**kwargs
):
"""
:keyword column_picker_for:
:paramtype column_picker_for: str
:keyword column_selection_categories:
:paramtype column_selection_categories: list[str]
:keyword single_column_selection:
:paramtype single_column_selection: bool
"""
super(AetherUIColumnPicker, self).__init__(**kwargs)
self.column_picker_for = column_picker_for
self.column_selection_categories = column_selection_categories
self.single_column_selection = single_column_selection
class AetherUIJsonEditor(msrest.serialization.Model):
"""AetherUIJsonEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
*,
json_schema: Optional[str] = None,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(AetherUIJsonEditor, self).__init__(**kwargs)
self.json_schema = json_schema
class AetherUIParameterHint(msrest.serialization.Model):
"""AetherUIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum
:ivar column_picker:
:vartype column_picker: ~flow.models.AetherUIColumnPicker
:ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:vartype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.AetherUIJsonEditor
:ivar prompt_flow_connection_selector:
:vartype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector
:ivar azure_open_ai_deployment_name_selector:
:vartype azure_open_ai_deployment_name_selector:
~flow.models.AetherUIAzureOpenAIDeploymentNameSelector
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'column_picker': {'key': 'columnPicker', 'type': 'AetherUIColumnPicker'},
'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'AetherUIJsonEditor'},
'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'AetherUIPromptFlowConnectionSelector'},
'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'AetherUIAzureOpenAIDeploymentNameSelector'},
'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
'anonymous': {'key': 'Anonymous', 'type': 'bool'},
}
def __init__(
self,
*,
ui_widget_type: Optional[Union[str, "AetherUIWidgetTypeEnum"]] = None,
column_picker: Optional["AetherUIColumnPicker"] = None,
ui_script_language: Optional[Union[str, "AetherUIScriptLanguageEnum"]] = None,
json_editor: Optional["AetherUIJsonEditor"] = None,
prompt_flow_connection_selector: Optional["AetherUIPromptFlowConnectionSelector"] = None,
azure_open_ai_deployment_name_selector: Optional["AetherUIAzureOpenAIDeploymentNameSelector"] = None,
ux_ignore: Optional[bool] = None,
anonymous: Optional[bool] = None,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.AetherUIWidgetTypeEnum
:keyword column_picker:
:paramtype column_picker: ~flow.models.AetherUIColumnPicker
:keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:paramtype ui_script_language: str or ~flow.models.AetherUIScriptLanguageEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.AetherUIJsonEditor
:keyword prompt_flow_connection_selector:
:paramtype prompt_flow_connection_selector: ~flow.models.AetherUIPromptFlowConnectionSelector
:keyword azure_open_ai_deployment_name_selector:
:paramtype azure_open_ai_deployment_name_selector:
~flow.models.AetherUIAzureOpenAIDeploymentNameSelector
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
"""
super(AetherUIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = ui_widget_type
self.column_picker = column_picker
self.ui_script_language = ui_script_language
self.json_editor = json_editor
self.prompt_flow_connection_selector = prompt_flow_connection_selector
self.azure_open_ai_deployment_name_selector = azure_open_ai_deployment_name_selector
self.ux_ignore = ux_ignore
self.anonymous = anonymous
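
# Illustrative sketch (editor's note): a UI hint that renders a parameter as a
# single-select column picker. Only the widget-specific sub-model matching
# ``ui_widget_type`` is expected to be populated; that pairing is an assumption.
#
#     hint = AetherUIParameterHint(
#         ui_widget_type="ColumnPicker",
#         column_picker=AetherUIColumnPicker(
#             column_picker_for="dataset",
#             single_column_selection=True,
#         ),
#     )
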
class AetherUIPromptFlowConnectionSelector(msrest.serialization.Model):
"""AetherUIPromptFlowConnectionSelector.
:ivar prompt_flow_connection_type:
:vartype prompt_flow_connection_type: str
"""
_attribute_map = {
'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
}
def __init__(
self,
*,
prompt_flow_connection_type: Optional[str] = None,
**kwargs
):
"""
:keyword prompt_flow_connection_type:
:paramtype prompt_flow_connection_type: str
"""
super(AetherUIPromptFlowConnectionSelector, self).__init__(**kwargs)
self.prompt_flow_connection_type = prompt_flow_connection_type
class AetherValidationDataSettings(msrest.serialization.Model):
"""AetherValidationDataSettings.
:ivar n_cross_validations:
:vartype n_cross_validations: ~flow.models.AetherNCrossValidations
:ivar validation_data_size:
:vartype validation_data_size: float
:ivar cv_split_column_names:
:vartype cv_split_column_names: list[str]
:ivar validation_type:
:vartype validation_type: str
"""
_attribute_map = {
'n_cross_validations': {'key': 'nCrossValidations', 'type': 'AetherNCrossValidations'},
'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
'validation_type': {'key': 'validationType', 'type': 'str'},
}
def __init__(
self,
*,
n_cross_validations: Optional["AetherNCrossValidations"] = None,
validation_data_size: Optional[float] = None,
cv_split_column_names: Optional[List[str]] = None,
validation_type: Optional[str] = None,
**kwargs
):
"""
:keyword n_cross_validations:
:paramtype n_cross_validations: ~flow.models.AetherNCrossValidations
:keyword validation_data_size:
:paramtype validation_data_size: float
:keyword cv_split_column_names:
:paramtype cv_split_column_names: list[str]
:keyword validation_type:
:paramtype validation_type: str
"""
super(AetherValidationDataSettings, self).__init__(**kwargs)
self.n_cross_validations = n_cross_validations
self.validation_data_size = validation_data_size
self.cv_split_column_names = cv_split_column_names
self.validation_type = validation_type
class AetherVsoBuildArtifactInfo(msrest.serialization.Model):
"""AetherVsoBuildArtifactInfo.
:ivar build_info:
:vartype build_info: ~flow.models.AetherVsoBuildInfo
:ivar download_url:
:vartype download_url: str
"""
_attribute_map = {
'build_info': {'key': 'buildInfo', 'type': 'AetherVsoBuildInfo'},
'download_url': {'key': 'downloadUrl', 'type': 'str'},
}
def __init__(
self,
*,
build_info: Optional["AetherVsoBuildInfo"] = None,
download_url: Optional[str] = None,
**kwargs
):
"""
:keyword build_info:
:paramtype build_info: ~flow.models.AetherVsoBuildInfo
:keyword download_url:
:paramtype download_url: str
"""
super(AetherVsoBuildArtifactInfo, self).__init__(**kwargs)
self.build_info = build_info
self.download_url = download_url
class AetherVsoBuildDefinitionInfo(msrest.serialization.Model):
"""AetherVsoBuildDefinitionInfo.
:ivar account_name:
:vartype account_name: str
:ivar project_id:
:vartype project_id: str
:ivar build_definition_id:
:vartype build_definition_id: int
"""
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'project_id': {'key': 'projectId', 'type': 'str'},
'build_definition_id': {'key': 'buildDefinitionId', 'type': 'int'},
}
def __init__(
self,
*,
account_name: Optional[str] = None,
project_id: Optional[str] = None,
build_definition_id: Optional[int] = None,
**kwargs
):
"""
:keyword account_name:
:paramtype account_name: str
:keyword project_id:
:paramtype project_id: str
:keyword build_definition_id:
:paramtype build_definition_id: int
"""
super(AetherVsoBuildDefinitionInfo, self).__init__(**kwargs)
self.account_name = account_name
self.project_id = project_id
self.build_definition_id = build_definition_id
class AetherVsoBuildInfo(msrest.serialization.Model):
"""AetherVsoBuildInfo.
:ivar definition_info:
:vartype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
:ivar build_id:
:vartype build_id: int
"""
_attribute_map = {
'definition_info': {'key': 'definitionInfo', 'type': 'AetherVsoBuildDefinitionInfo'},
'build_id': {'key': 'buildId', 'type': 'int'},
}
def __init__(
self,
*,
definition_info: Optional["AetherVsoBuildDefinitionInfo"] = None,
build_id: Optional[int] = None,
**kwargs
):
"""
:keyword definition_info:
:paramtype definition_info: ~flow.models.AetherVsoBuildDefinitionInfo
:keyword build_id:
:paramtype build_id: int
"""
super(AetherVsoBuildInfo, self).__init__(**kwargs)
self.definition_info = definition_info
self.build_id = build_id
class AEVAComputeConfiguration(msrest.serialization.Model):
"""AEVAComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar is_preemptable:
:vartype is_preemptable: bool
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'is_preemptable': {'key': 'isPreemptable', 'type': 'bool'},
}
def __init__(
self,
*,
target: Optional[str] = None,
instance_count: Optional[int] = None,
is_local: Optional[bool] = None,
location: Optional[str] = None,
is_clusterless: Optional[bool] = None,
instance_type: Optional[str] = None,
properties: Optional[Dict[str, Any]] = None,
is_preemptable: Optional[bool] = None,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword is_preemptable:
:paramtype is_preemptable: bool
"""
super(AEVAComputeConfiguration, self).__init__(**kwargs)
self.target = target
self.instance_count = instance_count
self.is_local = is_local
self.location = location
self.is_clusterless = is_clusterless
self.instance_type = instance_type
self.properties = properties
self.is_preemptable = is_preemptable
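
# Illustrative sketch (editor's note): pointing a step at a two-node cluster. The
# target name and instance type are placeholders.
#
#     compute = AEVAComputeConfiguration(
#         target="cpu-cluster",
#         instance_count=2,
#         instance_type="STANDARD_DS3_V2",
#     )
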
class AEVAResourceConfiguration(msrest.serialization.Model):
"""AEVAResourceConfiguration.
:ivar instance_count:
:vartype instance_count: int
:ivar instance_type:
:vartype instance_type: str
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
:ivar locations:
:vartype locations: list[str]
:ivar instance_priority:
:vartype instance_priority: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
"""
_attribute_map = {
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'locations': {'key': 'locations', 'type': '[str]'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
}
def __init__(
self,
*,
instance_count: Optional[int] = None,
instance_type: Optional[str] = None,
properties: Optional[Dict[str, Any]] = None,
locations: Optional[List[str]] = None,
instance_priority: Optional[str] = None,
quota_enforcement_resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword instance_count:
:paramtype instance_count: int
:keyword instance_type:
:paramtype instance_type: str
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
:keyword locations:
:paramtype locations: list[str]
:keyword instance_priority:
:paramtype instance_priority: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
"""
super(AEVAResourceConfiguration, self).__init__(**kwargs)
self.instance_count = instance_count
self.instance_type = instance_type
self.properties = properties
self.locations = locations
self.instance_priority = instance_priority
self.quota_enforcement_resource_id = quota_enforcement_resource_id
class AISuperComputerConfiguration(msrest.serialization.Model):
"""AISuperComputerConfiguration.
:ivar instance_type:
:vartype instance_type: str
:ivar instance_types:
:vartype instance_types: list[str]
:ivar image_version:
:vartype image_version: str
:ivar location:
:vartype location: str
:ivar locations:
:vartype locations: list[str]
:ivar ai_super_computer_storage_data: Dictionary of
:code:`<AISuperComputerStorageReferenceConfiguration>`.
:vartype ai_super_computer_storage_data: dict[str,
~flow.models.AISuperComputerStorageReferenceConfiguration]
:ivar interactive:
:vartype interactive: bool
:ivar scale_policy:
:vartype scale_policy: ~flow.models.AISuperComputerScalePolicy
:ivar virtual_cluster_arm_id:
:vartype virtual_cluster_arm_id: str
:ivar tensorboard_log_directory:
:vartype tensorboard_log_directory: str
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar ssh_public_keys:
:vartype ssh_public_keys: list[str]
:ivar enable_azml_int:
:vartype enable_azml_int: bool
:ivar priority:
:vartype priority: str
:ivar sla_tier:
:vartype sla_tier: str
:ivar suspend_on_idle_time_hours:
:vartype suspend_on_idle_time_hours: long
:ivar user_alias:
:vartype user_alias: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
:ivar model_compute_specification_id:
:vartype model_compute_specification_id: str
:ivar group_policy_name:
:vartype group_policy_name: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'image_version': {'key': 'imageVersion', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'ai_super_computer_storage_data': {'key': 'aiSuperComputerStorageData', 'type': '{AISuperComputerStorageReferenceConfiguration}'},
'interactive': {'key': 'interactive', 'type': 'bool'},
'scale_policy': {'key': 'scalePolicy', 'type': 'AISuperComputerScalePolicy'},
'virtual_cluster_arm_id': {'key': 'virtualClusterArmId', 'type': 'str'},
'tensorboard_log_directory': {'key': 'tensorboardLogDirectory', 'type': 'str'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'ssh_public_keys': {'key': 'sshPublicKeys', 'type': '[str]'},
'enable_azml_int': {'key': 'enableAzmlInt', 'type': 'bool'},
'priority': {'key': 'priority', 'type': 'str'},
'sla_tier': {'key': 'slaTier', 'type': 'str'},
'suspend_on_idle_time_hours': {'key': 'suspendOnIdleTimeHours', 'type': 'long'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
'group_policy_name': {'key': 'groupPolicyName', 'type': 'str'},
}
def __init__(
self,
*,
instance_type: Optional[str] = None,
instance_types: Optional[List[str]] = None,
image_version: Optional[str] = None,
location: Optional[str] = None,
locations: Optional[List[str]] = None,
ai_super_computer_storage_data: Optional[Dict[str, "AISuperComputerStorageReferenceConfiguration"]] = None,
interactive: Optional[bool] = None,
scale_policy: Optional["AISuperComputerScalePolicy"] = None,
virtual_cluster_arm_id: Optional[str] = None,
tensorboard_log_directory: Optional[str] = None,
ssh_public_key: Optional[str] = None,
ssh_public_keys: Optional[List[str]] = None,
enable_azml_int: Optional[bool] = None,
priority: Optional[str] = None,
sla_tier: Optional[str] = None,
suspend_on_idle_time_hours: Optional[int] = None,
user_alias: Optional[str] = None,
quota_enforcement_resource_id: Optional[str] = None,
model_compute_specification_id: Optional[str] = None,
group_policy_name: Optional[str] = None,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword image_version:
:paramtype image_version: str
:keyword location:
:paramtype location: str
:keyword locations:
:paramtype locations: list[str]
:keyword ai_super_computer_storage_data: Dictionary of
:code:`<AISuperComputerStorageReferenceConfiguration>`.
:paramtype ai_super_computer_storage_data: dict[str,
~flow.models.AISuperComputerStorageReferenceConfiguration]
:keyword interactive:
:paramtype interactive: bool
:keyword scale_policy:
:paramtype scale_policy: ~flow.models.AISuperComputerScalePolicy
:keyword virtual_cluster_arm_id:
:paramtype virtual_cluster_arm_id: str
:keyword tensorboard_log_directory:
:paramtype tensorboard_log_directory: str
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword ssh_public_keys:
:paramtype ssh_public_keys: list[str]
:keyword enable_azml_int:
:paramtype enable_azml_int: bool
:keyword priority:
:paramtype priority: str
:keyword sla_tier:
:paramtype sla_tier: str
:keyword suspend_on_idle_time_hours:
:paramtype suspend_on_idle_time_hours: long
:keyword user_alias:
:paramtype user_alias: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
:keyword model_compute_specification_id:
:paramtype model_compute_specification_id: str
:keyword group_policy_name:
:paramtype group_policy_name: str
"""
super(AISuperComputerConfiguration, self).__init__(**kwargs)
self.instance_type = instance_type
self.instance_types = instance_types
self.image_version = image_version
self.location = location
self.locations = locations
self.ai_super_computer_storage_data = ai_super_computer_storage_data
self.interactive = interactive
self.scale_policy = scale_policy
self.virtual_cluster_arm_id = virtual_cluster_arm_id
self.tensorboard_log_directory = tensorboard_log_directory
self.ssh_public_key = ssh_public_key
self.ssh_public_keys = ssh_public_keys
self.enable_azml_int = enable_azml_int
self.priority = priority
self.sla_tier = sla_tier
self.suspend_on_idle_time_hours = suspend_on_idle_time_hours
self.user_alias = user_alias
self.quota_enforcement_resource_id = quota_enforcement_resource_id
self.model_compute_specification_id = model_compute_specification_id
self.group_policy_name = group_policy_name
class AISuperComputerScalePolicy(msrest.serialization.Model):
"""AISuperComputerScalePolicy.
:ivar auto_scale_instance_type_count_set:
:vartype auto_scale_instance_type_count_set: list[int]
:ivar auto_scale_interval_in_sec:
:vartype auto_scale_interval_in_sec: int
:ivar max_instance_type_count:
:vartype max_instance_type_count: int
:ivar min_instance_type_count:
:vartype min_instance_type_count: int
"""
_attribute_map = {
'auto_scale_instance_type_count_set': {'key': 'autoScaleInstanceTypeCountSet', 'type': '[int]'},
'auto_scale_interval_in_sec': {'key': 'autoScaleIntervalInSec', 'type': 'int'},
'max_instance_type_count': {'key': 'maxInstanceTypeCount', 'type': 'int'},
'min_instance_type_count': {'key': 'minInstanceTypeCount', 'type': 'int'},
}
def __init__(
self,
*,
auto_scale_instance_type_count_set: Optional[List[int]] = None,
auto_scale_interval_in_sec: Optional[int] = None,
max_instance_type_count: Optional[int] = None,
min_instance_type_count: Optional[int] = None,
**kwargs
):
"""
:keyword auto_scale_instance_type_count_set:
:paramtype auto_scale_instance_type_count_set: list[int]
:keyword auto_scale_interval_in_sec:
:paramtype auto_scale_interval_in_sec: int
:keyword max_instance_type_count:
:paramtype max_instance_type_count: int
:keyword min_instance_type_count:
:paramtype min_instance_type_count: int
"""
super(AISuperComputerScalePolicy, self).__init__(**kwargs)
self.auto_scale_instance_type_count_set = auto_scale_instance_type_count_set
self.auto_scale_interval_in_sec = auto_scale_interval_in_sec
self.max_instance_type_count = max_instance_type_count
self.min_instance_type_count = min_instance_type_count
class AISuperComputerStorageReferenceConfiguration(msrest.serialization.Model):
"""AISuperComputerStorageReferenceConfiguration.
:ivar container_name:
:vartype container_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'container_name': {'key': 'containerName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
container_name: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword container_name:
:paramtype container_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(AISuperComputerStorageReferenceConfiguration, self).__init__(**kwargs)
self.container_name = container_name
self.relative_path = relative_path
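
# Illustrative sketch (editor's note): an AI supercomputer request combining the three
# models above. Instance type, SLA tier and storage values are placeholders.
#
#     sc = AISuperComputerConfiguration(
#         instance_type="ND40rs_v2",
#         sla_tier="Premium",
#         scale_policy=AISuperComputerScalePolicy(
#             min_instance_type_count=1, max_instance_type_count=4,
#         ),
#         ai_super_computer_storage_data={
#             "data": AISuperComputerStorageReferenceConfiguration(
#                 container_name="training-data", relative_path="cifar10/",
#             ),
#         },
#     )
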
class AKSAdvanceSettings(msrest.serialization.Model):
"""AKSAdvanceSettings.
:ivar auto_scaler:
:vartype auto_scaler: ~flow.models.AutoScaler
:ivar container_resource_requirements:
:vartype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar scoring_timeout_ms:
:vartype scoring_timeout_ms: int
:ivar num_replicas:
:vartype num_replicas: int
"""
_attribute_map = {
'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
'num_replicas': {'key': 'numReplicas', 'type': 'int'},
}
def __init__(
self,
*,
auto_scaler: Optional["AutoScaler"] = None,
container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
app_insights_enabled: Optional[bool] = None,
scoring_timeout_ms: Optional[int] = None,
num_replicas: Optional[int] = None,
**kwargs
):
"""
:keyword auto_scaler:
:paramtype auto_scaler: ~flow.models.AutoScaler
:keyword container_resource_requirements:
:paramtype container_resource_requirements: ~flow.models.ContainerResourceRequirements
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword scoring_timeout_ms:
:paramtype scoring_timeout_ms: int
:keyword num_replicas:
:paramtype num_replicas: int
"""
super(AKSAdvanceSettings, self).__init__(**kwargs)
self.auto_scaler = auto_scaler
self.container_resource_requirements = container_resource_requirements
self.app_insights_enabled = app_insights_enabled
self.scoring_timeout_ms = scoring_timeout_ms
self.num_replicas = num_replicas
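
# Illustrative sketch (editor's note): AKS deployment tuning. ``scoring_timeout_ms``
# is in milliseconds; the values are placeholders.
#
#     aks = AKSAdvanceSettings(
#         app_insights_enabled=True,
#         scoring_timeout_ms=60000,
#         num_replicas=3,
#     )
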
class AKSReplicaStatus(msrest.serialization.Model):
"""AKSReplicaStatus.
:ivar desired_replicas:
:vartype desired_replicas: int
:ivar updated_replicas:
:vartype updated_replicas: int
:ivar available_replicas:
:vartype available_replicas: int
:ivar error:
:vartype error: ~flow.models.ModelManagementErrorResponse
"""
_attribute_map = {
'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
}
def __init__(
self,
*,
desired_replicas: Optional[int] = None,
updated_replicas: Optional[int] = None,
available_replicas: Optional[int] = None,
error: Optional["ModelManagementErrorResponse"] = None,
**kwargs
):
"""
:keyword desired_replicas:
:paramtype desired_replicas: int
:keyword updated_replicas:
:paramtype updated_replicas: int
:keyword available_replicas:
:paramtype available_replicas: int
:keyword error:
:paramtype error: ~flow.models.ModelManagementErrorResponse
"""
super(AKSReplicaStatus, self).__init__(**kwargs)
self.desired_replicas = desired_replicas
self.updated_replicas = updated_replicas
self.available_replicas = available_replicas
self.error = error
class AMLComputeConfiguration(msrest.serialization.Model):
"""AMLComputeConfiguration.
:ivar name:
:vartype name: str
:ivar vm_size:
:vartype vm_size: str
:ivar vm_priority: Possible values include: "Dedicated", "Lowpriority".
:vartype vm_priority: str or ~flow.models.VmPriority
:ivar retain_cluster:
:vartype retain_cluster: bool
:ivar cluster_max_node_count:
:vartype cluster_max_node_count: int
:ivar os_type:
:vartype os_type: str
:ivar virtual_machine_image:
:vartype virtual_machine_image: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'vm_priority': {'key': 'vmPriority', 'type': 'str'},
'retain_cluster': {'key': 'retainCluster', 'type': 'bool'},
'cluster_max_node_count': {'key': 'clusterMaxNodeCount', 'type': 'int'},
'os_type': {'key': 'osType', 'type': 'str'},
'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
vm_size: Optional[str] = None,
vm_priority: Optional[Union[str, "VmPriority"]] = None,
retain_cluster: Optional[bool] = None,
cluster_max_node_count: Optional[int] = None,
os_type: Optional[str] = None,
virtual_machine_image: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword vm_size:
:paramtype vm_size: str
:keyword vm_priority: Possible values include: "Dedicated", "Lowpriority".
:paramtype vm_priority: str or ~flow.models.VmPriority
:keyword retain_cluster:
:paramtype retain_cluster: bool
:keyword cluster_max_node_count:
:paramtype cluster_max_node_count: int
:keyword os_type:
:paramtype os_type: str
:keyword virtual_machine_image:
:paramtype virtual_machine_image: str
"""
super(AMLComputeConfiguration, self).__init__(**kwargs)
self.name = name
self.vm_size = vm_size
self.vm_priority = vm_priority
self.retain_cluster = retain_cluster
self.cluster_max_node_count = cluster_max_node_count
self.os_type = os_type
self.virtual_machine_image = virtual_machine_image
class AmlDataset(msrest.serialization.Model):
"""AmlDataset.
:ivar registered_data_set_reference:
:vartype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'registered_data_set_reference': {'key': 'registeredDataSetReference', 'type': 'RegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
registered_data_set_reference: Optional["RegisteredDataSetReference"] = None,
saved_data_set_reference: Optional["SavedDataSetReference"] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword registered_data_set_reference:
:paramtype registered_data_set_reference: ~flow.models.RegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(AmlDataset, self).__init__(**kwargs)
self.registered_data_set_reference = registered_data_set_reference
self.saved_data_set_reference = saved_data_set_reference
self.additional_transformations = additional_transformations
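
# Illustrative sketch (editor's note): building a model from a wire-format dict via the
# ``deserialize`` classmethod inherited from ``msrest.serialization.Model``. The payload
# content is a placeholder.
#
#     dataset = AmlDataset.deserialize(
#         {"additionalTransformations": "select_columns: [col_a, col_b]"}
#     )
#     assert dataset.additional_transformations is not None
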
class AmlK8SConfiguration(msrest.serialization.Model):
"""AmlK8SConfiguration.
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.ResourceConfiguration
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.InteractiveConfiguration
"""
_attribute_map = {
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfiguration'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'AmlK8SPriorityConfiguration'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfiguration'},
}
def __init__(
self,
*,
resource_configuration: Optional["ResourceConfiguration"] = None,
priority_configuration: Optional["AmlK8SPriorityConfiguration"] = None,
interactive_configuration: Optional["InteractiveConfiguration"] = None,
**kwargs
):
"""
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.ResourceConfiguration
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.AmlK8SPriorityConfiguration
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.InteractiveConfiguration
"""
super(AmlK8SConfiguration, self).__init__(**kwargs)
self.resource_configuration = resource_configuration
self.priority_configuration = priority_configuration
self.interactive_configuration = interactive_configuration
class AmlK8SPriorityConfiguration(msrest.serialization.Model):
"""AmlK8SPriorityConfiguration.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
*,
job_priority: Optional[int] = None,
is_preemptible: Optional[bool] = None,
node_count_set: Optional[List[int]] = None,
scale_interval: Optional[int] = None,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(AmlK8SPriorityConfiguration, self).__init__(**kwargs)
self.job_priority = job_priority
self.is_preemptible = is_preemptible
self.node_count_set = node_count_set
self.scale_interval = scale_interval
class AmlSparkCloudSetting(msrest.serialization.Model):
"""AmlSparkCloudSetting.
:ivar entry:
:vartype entry: ~flow.models.EntrySetting
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar inline_environment_definition_string:
:vartype inline_environment_definition_string: str
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar compute:
:vartype compute: str
:ivar resources:
:vartype resources: ~flow.models.ResourcesSetting
:ivar identity:
:vartype identity: ~flow.models.IdentitySetting
"""
_attribute_map = {
'entry': {'key': 'entry', 'type': 'EntrySetting'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'inline_environment_definition_string': {'key': 'inlineEnvironmentDefinitionString', 'type': 'str'},
'conf': {'key': 'conf', 'type': '{str}'},
'compute': {'key': 'compute', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'ResourcesSetting'},
'identity': {'key': 'identity', 'type': 'IdentitySetting'},
}
def __init__(
self,
*,
entry: Optional["EntrySetting"] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
jars: Optional[List[str]] = None,
py_files: Optional[List[str]] = None,
driver_memory: Optional[str] = None,
driver_cores: Optional[int] = None,
executor_memory: Optional[str] = None,
executor_cores: Optional[int] = None,
number_executors: Optional[int] = None,
environment_asset_id: Optional[str] = None,
environment_variables: Optional[Dict[str, str]] = None,
inline_environment_definition_string: Optional[str] = None,
conf: Optional[Dict[str, str]] = None,
compute: Optional[str] = None,
resources: Optional["ResourcesSetting"] = None,
identity: Optional["IdentitySetting"] = None,
**kwargs
):
"""
:keyword entry:
:paramtype entry: ~flow.models.EntrySetting
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword inline_environment_definition_string:
:paramtype inline_environment_definition_string: str
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword compute:
:paramtype compute: str
:keyword resources:
:paramtype resources: ~flow.models.ResourcesSetting
:keyword identity:
:paramtype identity: ~flow.models.IdentitySetting
"""
super(AmlSparkCloudSetting, self).__init__(**kwargs)
self.entry = entry
self.files = files
self.archives = archives
self.jars = jars
self.py_files = py_files
self.driver_memory = driver_memory
self.driver_cores = driver_cores
self.executor_memory = executor_memory
self.executor_cores = executor_cores
self.number_executors = number_executors
self.environment_asset_id = environment_asset_id
self.environment_variables = environment_variables
self.inline_environment_definition_string = inline_environment_definition_string
self.conf = conf
self.compute = compute
self.resources = resources
self.identity = identity
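# Illustrative usage (all values hypothetical). Memory sizes are plain strings,
# ``compute`` names a Spark compute target, and ``conf`` carries raw Spark
# configuration key/value pairs:
#
#     spark_setting = AmlSparkCloudSetting(
#         driver_memory="28g",
#         driver_cores=4,
#         executor_memory="28g",
#         executor_cores=4,
#         number_executors=2,
#         conf={"spark.yarn.maxAppAttempts": "1"},
#         compute="my-spark-compute",   # hypothetical compute name
#     )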
class APCloudConfiguration(msrest.serialization.Model):
"""APCloudConfiguration.
:ivar referenced_ap_module_guid:
:vartype referenced_ap_module_guid: str
:ivar user_alias:
:vartype user_alias: str
:ivar aether_module_type:
:vartype aether_module_type: str
"""
_attribute_map = {
'referenced_ap_module_guid': {'key': 'referencedAPModuleGuid', 'type': 'str'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'aether_module_type': {'key': 'aetherModuleType', 'type': 'str'},
}
def __init__(
self,
*,
referenced_ap_module_guid: Optional[str] = None,
user_alias: Optional[str] = None,
aether_module_type: Optional[str] = None,
**kwargs
):
"""
:keyword referenced_ap_module_guid:
:paramtype referenced_ap_module_guid: str
:keyword user_alias:
:paramtype user_alias: str
:keyword aether_module_type:
:paramtype aether_module_type: str
"""
super(APCloudConfiguration, self).__init__(**kwargs)
self.referenced_ap_module_guid = referenced_ap_module_guid
self.user_alias = user_alias
self.aether_module_type = aether_module_type
class ApiAndParameters(msrest.serialization.Model):
"""ApiAndParameters.
:ivar api:
:vartype api: str
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
:ivar default_prompt:
:vartype default_prompt: str
"""
_attribute_map = {
'api': {'key': 'api', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{FlowToolSettingParameter}'},
'default_prompt': {'key': 'default_prompt', 'type': 'str'},
}
def __init__(
self,
*,
api: Optional[str] = None,
parameters: Optional[Dict[str, "FlowToolSettingParameter"]] = None,
default_prompt: Optional[str] = None,
**kwargs
):
"""
:keyword api:
:paramtype api: str
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, ~flow.models.FlowToolSettingParameter]
:keyword default_prompt:
:paramtype default_prompt: str
"""
super(ApiAndParameters, self).__init__(**kwargs)
self.api = api
self.parameters = parameters
self.default_prompt = default_prompt
class ApplicationEndpointConfiguration(msrest.serialization.Model):
"""ApplicationEndpointConfiguration.
:ivar type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard", "VSCode",
"Theia", "Grafana", "Custom", "RayDashboard".
:vartype type: str or ~flow.models.ApplicationEndpointType
:ivar port:
:vartype port: int
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: ~flow.models.Nodes
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'Nodes'},
}
def __init__(
self,
*,
type: Optional[Union[str, "ApplicationEndpointType"]] = None,
port: Optional[int] = None,
properties: Optional[Dict[str, str]] = None,
nodes: Optional["Nodes"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "Jupyter", "JupyterLab", "SSH", "TensorBoard",
"VSCode", "Theia", "Grafana", "Custom", "RayDashboard".
:paramtype type: str or ~flow.models.ApplicationEndpointType
:keyword port:
:paramtype port: int
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: ~flow.models.Nodes
"""
super(ApplicationEndpointConfiguration, self).__init__(**kwargs)
self.type = type
self.port = port
self.properties = properties
self.nodes = nodes
class ArgumentAssignment(msrest.serialization.Model):
"""ArgumentAssignment.
:ivar value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:vartype value_type: str or ~flow.models.ArgumentValueType
:ivar value:
:vartype value: str
:ivar nested_argument_list:
:vartype nested_argument_list: list[~flow.models.ArgumentAssignment]
:ivar string_interpolation_argument_list:
:vartype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'nested_argument_list': {'key': 'nestedArgumentList', 'type': '[ArgumentAssignment]'},
'string_interpolation_argument_list': {'key': 'stringInterpolationArgumentList', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
*,
value_type: Optional[Union[str, "ArgumentValueType"]] = None,
value: Optional[str] = None,
nested_argument_list: Optional[List["ArgumentAssignment"]] = None,
string_interpolation_argument_list: Optional[List["ArgumentAssignment"]] = None,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "Parameter", "Input", "Output",
"NestedList", "StringInterpolationList".
:paramtype value_type: str or ~flow.models.ArgumentValueType
:keyword value:
:paramtype value: str
:keyword nested_argument_list:
:paramtype nested_argument_list: list[~flow.models.ArgumentAssignment]
:keyword string_interpolation_argument_list:
:paramtype string_interpolation_argument_list: list[~flow.models.ArgumentAssignment]
"""
super(ArgumentAssignment, self).__init__(**kwargs)
self.value_type = value_type
self.value = value
self.nested_argument_list = nested_argument_list
self.string_interpolation_argument_list = string_interpolation_argument_list
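# Illustrative usage (hypothetical values). The model is recursive: the list-valued
# fields hold further ``ArgumentAssignment`` instances, and ``value_type`` accepts
# either an ``ArgumentValueType`` enum member or its documented string value:
#
#     arg = ArgumentAssignment(
#         value_type="StringInterpolationList",
#         string_interpolation_argument_list=[
#             ArgumentAssignment(value_type="Literal", value="--data "),
#             ArgumentAssignment(value_type="Input", value="training_data"),
#         ],
#     )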
class Asset(msrest.serialization.Model):
"""Asset.
:ivar asset_id:
:vartype asset_id: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'asset_id': {'key': 'assetId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
asset_id: Optional[str] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword asset_id:
:paramtype asset_id: str
:keyword type:
:paramtype type: str
"""
super(Asset, self).__init__(**kwargs)
self.asset_id = asset_id
self.type = type
class AssetDefinition(msrest.serialization.Model):
"""AssetDefinition.
:ivar path:
:vartype path: str
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AEVAAssetType
:ivar asset_id:
:vartype asset_id: str
:ivar serialized_asset_id:
:vartype serialized_asset_id: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'serialized_asset_id': {'key': 'serializedAssetId', 'type': 'str'},
}
def __init__(
self,
*,
path: Optional[str] = None,
type: Optional[Union[str, "AEVAAssetType"]] = None,
asset_id: Optional[str] = None,
serialized_asset_id: Optional[str] = None,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AEVAAssetType
:keyword asset_id:
:paramtype asset_id: str
:keyword serialized_asset_id:
:paramtype serialized_asset_id: str
"""
super(AssetDefinition, self).__init__(**kwargs)
self.path = path
self.type = type
self.asset_id = asset_id
self.serialized_asset_id = serialized_asset_id
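# Illustrative usage (hypothetical values). ``type`` is typed
# ``Union[str, "AEVAAssetType"]``, so passing the documented string literal is
# equivalent to passing the enum member:
#
#     definition = AssetDefinition(
#         path="azureml://datastores/workspaceblobstore/paths/data",  # hypothetical path
#         type="UriFolder",
#     )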
class AssetNameAndVersionIdentifier(msrest.serialization.Model):
"""AssetNameAndVersionIdentifier.
:ivar asset_name:
:vartype asset_name: str
:ivar version:
:vartype version: str
:ivar feed_name:
:vartype feed_name: str
"""
_attribute_map = {
'asset_name': {'key': 'assetName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
}
def __init__(
self,
*,
asset_name: Optional[str] = None,
version: Optional[str] = None,
feed_name: Optional[str] = None,
**kwargs
):
"""
:keyword asset_name:
:paramtype asset_name: str
:keyword version:
:paramtype version: str
:keyword feed_name:
:paramtype feed_name: str
"""
super(AssetNameAndVersionIdentifier, self).__init__(**kwargs)
self.asset_name = asset_name
self.version = version
self.feed_name = feed_name
class AssetOutputSettings(msrest.serialization.Model):
"""AssetOutputSettings.
:ivar path:
:vartype path: str
:ivar path_parameter_assignment:
:vartype path_parameter_assignment: ~flow.models.ParameterAssignment
:ivar type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:vartype type: str or ~flow.models.AEVAAssetType
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'path_parameter_assignment': {'key': 'PathParameterAssignment', 'type': 'ParameterAssignment'},
'type': {'key': 'type', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
path: Optional[str] = None,
path_parameter_assignment: Optional["ParameterAssignment"] = None,
type: Optional[Union[str, "AEVAAssetType"]] = None,
options: Optional[Dict[str, str]] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword path_parameter_assignment:
:paramtype path_parameter_assignment: ~flow.models.ParameterAssignment
:keyword type: Possible values include: "UriFile", "UriFolder", "MLTable", "CustomModel",
"MLFlowModel", "TritonModel", "OpenAIModel".
:paramtype type: str or ~flow.models.AEVAAssetType
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(AssetOutputSettings, self).__init__(**kwargs)
self.path = path
self.path_parameter_assignment = path_parameter_assignment
self.type = type
self.options = options
self.data_store_mode = data_store_mode
self.name = name
self.version = version
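# Illustrative usage (hypothetical values). Note that the wire keys come verbatim
# from the service contract -- including the atypically capitalized
# 'PathParameterAssignment' above -- so they should not be "corrected" by hand:
#
#     output_settings = AssetOutputSettings(
#         path="outputs/model",       # hypothetical relative path
#         type="MLFlowModel",
#         data_store_mode="Upload",
#         name="trained-model",
#         version="1",
#     )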
class AssetOutputSettingsParameter(msrest.serialization.Model):
"""AssetOutputSettingsParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.AssetOutputSettings
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'AssetOutputSettings'},
}
def __init__(
self,
*,
name: Optional[str] = None,
documentation: Optional[str] = None,
default_value: Optional["AssetOutputSettings"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.AssetOutputSettings
"""
super(AssetOutputSettingsParameter, self).__init__(**kwargs)
self.name = name
self.documentation = documentation
self.default_value = default_value
class AssetPublishResult(msrest.serialization.Model):
"""AssetPublishResult.
:ivar feed_name:
:vartype feed_name: str
:ivar asset_name:
:vartype asset_name: str
:ivar asset_version:
:vartype asset_version: str
:ivar step_name:
:vartype step_name: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar last_updated_time:
:vartype last_updated_time: ~datetime.datetime
:ivar regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
:vartype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
'asset_version': {'key': 'assetVersion', 'type': 'str'},
'step_name': {'key': 'stepName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'regional_publish_results': {'key': 'regionalPublishResults', 'type': '{AssetPublishSingleRegionResult}'},
}
def __init__(
self,
*,
feed_name: Optional[str] = None,
asset_name: Optional[str] = None,
asset_version: Optional[str] = None,
step_name: Optional[str] = None,
status: Optional[str] = None,
error_message: Optional[str] = None,
created_time: Optional[datetime.datetime] = None,
last_updated_time: Optional[datetime.datetime] = None,
regional_publish_results: Optional[Dict[str, "AssetPublishSingleRegionResult"]] = None,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword asset_name:
:paramtype asset_name: str
:keyword asset_version:
:paramtype asset_version: str
:keyword step_name:
:paramtype step_name: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword last_updated_time:
:paramtype last_updated_time: ~datetime.datetime
:keyword regional_publish_results: Dictionary of :code:`<AssetPublishSingleRegionResult>`.
:paramtype regional_publish_results: dict[str, ~flow.models.AssetPublishSingleRegionResult]
"""
super(AssetPublishResult, self).__init__(**kwargs)
self.feed_name = feed_name
self.asset_name = asset_name
self.asset_version = asset_version
self.step_name = step_name
self.status = status
self.error_message = error_message
self.created_time = created_time
self.last_updated_time = last_updated_time
self.regional_publish_results = regional_publish_results
class AssetPublishSingleRegionResult(msrest.serialization.Model):
"""AssetPublishSingleRegionResult.
:ivar step_name:
:vartype step_name: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar last_updated_time:
:vartype last_updated_time: ~datetime.datetime
:ivar total_steps:
:vartype total_steps: int
:ivar finished_steps:
:vartype finished_steps: int
:ivar remaining_steps:
:vartype remaining_steps: int
"""
_attribute_map = {
'step_name': {'key': 'stepName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'last_updated_time': {'key': 'lastUpdatedTime', 'type': 'iso-8601'},
'total_steps': {'key': 'totalSteps', 'type': 'int'},
'finished_steps': {'key': 'finishedSteps', 'type': 'int'},
'remaining_steps': {'key': 'remainingSteps', 'type': 'int'},
}
def __init__(
self,
*,
step_name: Optional[str] = None,
status: Optional[str] = None,
error_message: Optional[str] = None,
last_updated_time: Optional[datetime.datetime] = None,
total_steps: Optional[int] = None,
finished_steps: Optional[int] = None,
remaining_steps: Optional[int] = None,
**kwargs
):
"""
:keyword step_name:
:paramtype step_name: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword last_updated_time:
:paramtype last_updated_time: ~datetime.datetime
:keyword total_steps:
:paramtype total_steps: int
:keyword finished_steps:
:paramtype finished_steps: int
:keyword remaining_steps:
:paramtype remaining_steps: int
"""
super(AssetPublishSingleRegionResult, self).__init__(**kwargs)
self.step_name = step_name
self.status = status
self.error_message = error_message
self.last_updated_time = last_updated_time
self.total_steps = total_steps
self.finished_steps = finished_steps
self.remaining_steps = remaining_steps
class AssetTypeMetaInfo(msrest.serialization.Model):
"""AssetTypeMetaInfo.
:ivar consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade".
:vartype consumption_mode: str or ~flow.models.ConsumeMode
"""
_attribute_map = {
'consumption_mode': {'key': 'consumptionMode', 'type': 'str'},
}
def __init__(
self,
*,
consumption_mode: Optional[Union[str, "ConsumeMode"]] = None,
**kwargs
):
"""
:keyword consumption_mode: Possible values include: "Reference", "Copy", "CopyAndAutoUpgrade".
:paramtype consumption_mode: str or ~flow.models.ConsumeMode
"""
super(AssetTypeMetaInfo, self).__init__(**kwargs)
self.consumption_mode = consumption_mode
class AssetVersionPublishRequest(msrest.serialization.Model):
"""AssetVersionPublishRequest.
:ivar asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
"DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
"FlowRuntimeSpec".
:vartype asset_type: str or ~flow.models.AssetType
:ivar asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip".
:vartype asset_source_type: str or ~flow.models.AssetSourceType
:ivar yaml_file:
:vartype yaml_file: str
:ivar source_zip_url:
:vartype source_zip_url: str
:ivar source_zip_file:
:vartype source_zip_file: IO
:ivar feed_name:
:vartype feed_name: str
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar referenced_assets:
:vartype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
:ivar flow_file:
:vartype flow_file: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'asset_type': {'key': 'assetType', 'type': 'str'},
'asset_source_type': {'key': 'assetSourceType', 'type': 'str'},
'yaml_file': {'key': 'yamlFile', 'type': 'str'},
'source_zip_url': {'key': 'sourceZipUrl', 'type': 'str'},
'source_zip_file': {'key': 'sourceZipFile', 'type': 'IO'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'referenced_assets': {'key': 'referencedAssets', 'type': '[AssetNameAndVersionIdentifier]'},
'flow_file': {'key': 'flowFile', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
asset_type: Optional[Union[str, "AssetType"]] = None,
asset_source_type: Optional[Union[str, "AssetSourceType"]] = None,
yaml_file: Optional[str] = None,
source_zip_url: Optional[str] = None,
source_zip_file: Optional[IO] = None,
feed_name: Optional[str] = None,
set_as_default_version: Optional[bool] = None,
referenced_assets: Optional[List["AssetNameAndVersionIdentifier"]] = None,
flow_file: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword asset_type: Possible values include: "Component", "Model", "Environment", "Dataset",
"DataStore", "SampleGraph", "FlowTool", "FlowToolSetting", "FlowConnection", "FlowSample",
"FlowRuntimeSpec".
:paramtype asset_type: str or ~flow.models.AssetType
:keyword asset_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip".
:paramtype asset_source_type: str or ~flow.models.AssetSourceType
:keyword yaml_file:
:paramtype yaml_file: str
:keyword source_zip_url:
:paramtype source_zip_url: str
:keyword source_zip_file:
:paramtype source_zip_file: IO
:keyword feed_name:
:paramtype feed_name: str
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword referenced_assets:
:paramtype referenced_assets: list[~flow.models.AssetNameAndVersionIdentifier]
:keyword flow_file:
:paramtype flow_file: str
:keyword version:
:paramtype version: str
"""
super(AssetVersionPublishRequest, self).__init__(**kwargs)
self.asset_type = asset_type
self.asset_source_type = asset_source_type
self.yaml_file = yaml_file
self.source_zip_url = source_zip_url
self.source_zip_file = source_zip_file
self.feed_name = feed_name
self.set_as_default_version = set_as_default_version
self.referenced_assets = referenced_assets
self.flow_file = flow_file
self.version = version
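# Illustrative usage (hypothetical values). ``asset_type`` and ``asset_source_type``
# accept the documented string literals; ``source_zip_file`` would be an IO stream
# when publishing from a local zip instead of a URL:
#
#     publish_request = AssetVersionPublishRequest(
#         asset_type="FlowTool",
#         asset_source_type="Local",
#         yaml_file="tool.yaml",       # hypothetical file name
#         feed_name="my-feed",         # hypothetical feed name
#         set_as_default_version=True,
#         version="0.0.1",
#     )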
class AssignedUser(msrest.serialization.Model):
"""AssignedUser.
:ivar object_id:
:vartype object_id: str
:ivar tenant_id:
:vartype tenant_id: str
"""
_attribute_map = {
'object_id': {'key': 'objectId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
}
def __init__(
self,
*,
object_id: Optional[str] = None,
tenant_id: Optional[str] = None,
**kwargs
):
"""
:keyword object_id:
:paramtype object_id: str
:keyword tenant_id:
:paramtype tenant_id: str
"""
super(AssignedUser, self).__init__(**kwargs)
self.object_id = object_id
self.tenant_id = tenant_id
class AuthKeys(msrest.serialization.Model):
"""AuthKeys.
:ivar primary_key:
:vartype primary_key: str
:ivar secondary_key:
:vartype secondary_key: str
"""
_attribute_map = {
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
}
def __init__(
self,
*,
primary_key: Optional[str] = None,
secondary_key: Optional[str] = None,
**kwargs
):
"""
:keyword primary_key:
:paramtype primary_key: str
:keyword secondary_key:
:paramtype secondary_key: str
"""
super(AuthKeys, self).__init__(**kwargs)
self.primary_key = primary_key
self.secondary_key = secondary_key
class AutoClusterComputeSpecification(msrest.serialization.Model):
"""AutoClusterComputeSpecification.
:ivar instance_size:
:vartype instance_size: str
:ivar instance_priority:
:vartype instance_priority: str
:ivar os_type:
:vartype os_type: str
:ivar location:
:vartype location: str
:ivar runtime_version:
:vartype runtime_version: str
:ivar quota_enforcement_resource_id:
:vartype quota_enforcement_resource_id: str
:ivar model_compute_specification_id:
:vartype model_compute_specification_id: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
'quota_enforcement_resource_id': {'key': 'quotaEnforcementResourceId', 'type': 'str'},
'model_compute_specification_id': {'key': 'modelComputeSpecificationId', 'type': 'str'},
}
def __init__(
self,
*,
instance_size: Optional[str] = None,
instance_priority: Optional[str] = None,
os_type: Optional[str] = None,
location: Optional[str] = None,
runtime_version: Optional[str] = None,
quota_enforcement_resource_id: Optional[str] = None,
model_compute_specification_id: Optional[str] = None,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword instance_priority:
:paramtype instance_priority: str
:keyword os_type:
:paramtype os_type: str
:keyword location:
:paramtype location: str
:keyword runtime_version:
:paramtype runtime_version: str
:keyword quota_enforcement_resource_id:
:paramtype quota_enforcement_resource_id: str
:keyword model_compute_specification_id:
:paramtype model_compute_specification_id: str
"""
super(AutoClusterComputeSpecification, self).__init__(**kwargs)
self.instance_size = instance_size
self.instance_priority = instance_priority
self.os_type = os_type
self.location = location
self.runtime_version = runtime_version
self.quota_enforcement_resource_id = quota_enforcement_resource_id
self.model_compute_specification_id = model_compute_specification_id
class AutoDeleteSetting(msrest.serialization.Model):
"""AutoDeleteSetting.
:ivar condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
:vartype condition: str or ~flow.models.AutoDeleteCondition
:ivar value:
:vartype value: str
"""
_attribute_map = {
'condition': {'key': 'condition', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
condition: Optional[Union[str, "AutoDeleteCondition"]] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword condition: Possible values include: "CreatedGreaterThan", "LastAccessedGreaterThan".
:paramtype condition: str or ~flow.models.AutoDeleteCondition
:keyword value:
:paramtype value: str
"""
super(AutoDeleteSetting, self).__init__(**kwargs)
self.condition = condition
self.value = value
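# Illustrative usage (hypothetical values; the expected format of ``value`` is not
# documented in this contract, so the string below is only a guess):
#
#     auto_delete = AutoDeleteSetting(condition="LastAccessedGreaterThan", value="30")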
class AutoFeaturizeConfiguration(msrest.serialization.Model):
"""AutoFeaturizeConfiguration.
:ivar featurization_config:
:vartype featurization_config: ~flow.models.FeaturizationSettings
"""
_attribute_map = {
'featurization_config': {'key': 'featurizationConfig', 'type': 'FeaturizationSettings'},
}
def __init__(
self,
*,
featurization_config: Optional["FeaturizationSettings"] = None,
**kwargs
):
"""
:keyword featurization_config:
:paramtype featurization_config: ~flow.models.FeaturizationSettings
"""
super(AutoFeaturizeConfiguration, self).__init__(**kwargs)
self.featurization_config = featurization_config
class AutologgerSettings(msrest.serialization.Model):
"""AutologgerSettings.
:ivar ml_flow_autologger: Possible values include: "Enabled", "Disabled".
:vartype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState
"""
_attribute_map = {
'ml_flow_autologger': {'key': 'mlFlowAutologger', 'type': 'str'},
}
def __init__(
self,
*,
ml_flow_autologger: Optional[Union[str, "MLFlowAutologgerState"]] = None,
**kwargs
):
"""
:keyword ml_flow_autologger: Possible values include: "Enabled", "Disabled".
:paramtype ml_flow_autologger: str or ~flow.models.MLFlowAutologgerState
"""
super(AutologgerSettings, self).__init__(**kwargs)
self.ml_flow_autologger = ml_flow_autologger
class AutoMLComponentConfiguration(msrest.serialization.Model):
"""AutoMLComponentConfiguration.
:ivar auto_train_config:
:vartype auto_train_config: ~flow.models.AutoTrainConfiguration
:ivar auto_featurize_config:
:vartype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration
"""
_attribute_map = {
'auto_train_config': {'key': 'autoTrainConfig', 'type': 'AutoTrainConfiguration'},
'auto_featurize_config': {'key': 'autoFeaturizeConfig', 'type': 'AutoFeaturizeConfiguration'},
}
def __init__(
self,
*,
auto_train_config: Optional["AutoTrainConfiguration"] = None,
auto_featurize_config: Optional["AutoFeaturizeConfiguration"] = None,
**kwargs
):
"""
:keyword auto_train_config:
:paramtype auto_train_config: ~flow.models.AutoTrainConfiguration
:keyword auto_featurize_config:
:paramtype auto_featurize_config: ~flow.models.AutoFeaturizeConfiguration
"""
super(AutoMLComponentConfiguration, self).__init__(**kwargs)
self.auto_train_config = auto_train_config
self.auto_featurize_config = auto_featurize_config
class AutoScaler(msrest.serialization.Model):
"""AutoScaler.
:ivar autoscale_enabled:
:vartype autoscale_enabled: bool
:ivar min_replicas:
:vartype min_replicas: int
:ivar max_replicas:
:vartype max_replicas: int
:ivar target_utilization:
:vartype target_utilization: int
:ivar refresh_period_in_seconds:
:vartype refresh_period_in_seconds: int
"""
_attribute_map = {
'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
}
def __init__(
self,
*,
autoscale_enabled: Optional[bool] = None,
min_replicas: Optional[int] = None,
max_replicas: Optional[int] = None,
target_utilization: Optional[int] = None,
refresh_period_in_seconds: Optional[int] = None,
**kwargs
):
"""
:keyword autoscale_enabled:
:paramtype autoscale_enabled: bool
:keyword min_replicas:
:paramtype min_replicas: int
:keyword max_replicas:
:paramtype max_replicas: int
:keyword target_utilization:
:paramtype target_utilization: int
:keyword refresh_period_in_seconds:
:paramtype refresh_period_in_seconds: int
"""
super(AutoScaler, self).__init__(**kwargs)
self.autoscale_enabled = autoscale_enabled
self.min_replicas = min_replicas
self.max_replicas = max_replicas
self.target_utilization = target_utilization
self.refresh_period_in_seconds = refresh_period_in_seconds
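# Illustrative usage (hypothetical values). This sketch assumes
# ``target_utilization`` is an integer percentage, as in comparable AzureML
# autoscale settings:
#
#     scaler = AutoScaler(
#         autoscale_enabled=True,
#         min_replicas=1,
#         max_replicas=4,
#         target_utilization=70,
#         refresh_period_in_seconds=30,
#     )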
class AutoTrainConfiguration(msrest.serialization.Model):
"""AutoTrainConfiguration.
:ivar general_settings:
:vartype general_settings: ~flow.models.GeneralSettings
:ivar limit_settings:
:vartype limit_settings: ~flow.models.LimitSettings
:ivar data_settings:
:vartype data_settings: ~flow.models.DataSettings
:ivar forecasting_settings:
:vartype forecasting_settings: ~flow.models.ForecastingSettings
:ivar training_settings:
:vartype training_settings: ~flow.models.TrainingSettings
:ivar sweep_settings:
:vartype sweep_settings: ~flow.models.SweepSettings
:ivar image_model_settings: Dictionary of :code:`<any>`.
:vartype image_model_settings: dict[str, any]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar compute_configuration:
:vartype compute_configuration: ~flow.models.AEVAComputeConfiguration
:ivar resource_configurtion:
:vartype resource_configurtion: ~flow.models.AEVAResourceConfiguration
:ivar environment_id:
:vartype environment_id: str
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
"""
_attribute_map = {
'general_settings': {'key': 'generalSettings', 'type': 'GeneralSettings'},
'limit_settings': {'key': 'limitSettings', 'type': 'LimitSettings'},
'data_settings': {'key': 'dataSettings', 'type': 'DataSettings'},
'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'},
'training_settings': {'key': 'trainingSettings', 'type': 'TrainingSettings'},
'sweep_settings': {'key': 'sweepSettings', 'type': 'SweepSettings'},
'image_model_settings': {'key': 'imageModelSettings', 'type': '{object}'},
'properties': {'key': 'properties', 'type': '{str}'},
'compute_configuration': {'key': 'computeConfiguration', 'type': 'AEVAComputeConfiguration'},
'resource_configurtion': {'key': 'resourceConfigurtion', 'type': 'AEVAResourceConfiguration'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
}
def __init__(
self,
*,
general_settings: Optional["GeneralSettings"] = None,
limit_settings: Optional["LimitSettings"] = None,
data_settings: Optional["DataSettings"] = None,
forecasting_settings: Optional["ForecastingSettings"] = None,
training_settings: Optional["TrainingSettings"] = None,
sweep_settings: Optional["SweepSettings"] = None,
image_model_settings: Optional[Dict[str, Any]] = None,
properties: Optional[Dict[str, str]] = None,
compute_configuration: Optional["AEVAComputeConfiguration"] = None,
resource_configurtion: Optional["AEVAResourceConfiguration"] = None,
environment_id: Optional[str] = None,
environment_variables: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword general_settings:
:paramtype general_settings: ~flow.models.GeneralSettings
:keyword limit_settings:
:paramtype limit_settings: ~flow.models.LimitSettings
:keyword data_settings:
:paramtype data_settings: ~flow.models.DataSettings
:keyword forecasting_settings:
:paramtype forecasting_settings: ~flow.models.ForecastingSettings
:keyword training_settings:
:paramtype training_settings: ~flow.models.TrainingSettings
:keyword sweep_settings:
:paramtype sweep_settings: ~flow.models.SweepSettings
:keyword image_model_settings: Dictionary of :code:`<any>`.
:paramtype image_model_settings: dict[str, any]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword compute_configuration:
:paramtype compute_configuration: ~flow.models.AEVAComputeConfiguration
:keyword resource_configurtion:
:paramtype resource_configurtion: ~flow.models.AEVAResourceConfiguration
:keyword environment_id:
:paramtype environment_id: str
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
"""
super(AutoTrainConfiguration, self).__init__(**kwargs)
self.general_settings = general_settings
self.limit_settings = limit_settings
self.data_settings = data_settings
self.forecasting_settings = forecasting_settings
self.training_settings = training_settings
self.sweep_settings = sweep_settings
self.image_model_settings = image_model_settings
self.properties = properties
self.compute_configuration = compute_configuration
self.resource_configurtion = resource_configurtion
self.environment_id = environment_id
self.environment_variables = environment_variables
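# NOTE: 'resource_configurtion' (sic) above is not an editing mistake to fix here:
# autorest derives the attribute name from the wire key 'resourceConfigurtion',
# which is misspelled in the service contract itself. Renaming the Python attribute
# would diverge from regenerated code and break existing callers.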
class AvailabilityResponse(msrest.serialization.Model):
"""AvailabilityResponse.
:ivar is_available:
:vartype is_available: bool
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
"""
_attribute_map = {
'is_available': {'key': 'isAvailable', 'type': 'bool'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
*,
is_available: Optional[bool] = None,
error: Optional["ErrorResponse"] = None,
**kwargs
):
"""
:keyword is_available:
:paramtype is_available: bool
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
"""
super(AvailabilityResponse, self).__init__(**kwargs)
self.is_available = is_available
self.error = error
class AzureBlobReference(msrest.serialization.Model):
"""AzureBlobReference.
:ivar container:
:vartype container: str
:ivar sas_token:
:vartype sas_token: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'container': {'key': 'container', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
container: Optional[str] = None,
sas_token: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword container:
:paramtype container: str
:keyword sas_token:
:paramtype sas_token: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureBlobReference, self).__init__(**kwargs)
self.container = container
self.sas_token = sas_token
self.uri = uri
self.account = account
self.relative_path = relative_path
self.aml_data_store_name = aml_data_store_name
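# Illustrative usage (hypothetical values). The model carries both SAS-style and
# AML-datastore-style fields; this sketch assumes a caller sets only the subset
# relevant to its reference style:
#
#     blob_ref = AzureBlobReference(
#         container="data",
#         account="mystorageaccount",          # hypothetical storage account
#         relative_path="datasets/train.csv",  # hypothetical path
#         aml_data_store_name="workspaceblobstore",
#     )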
class AzureDatabaseReference(msrest.serialization.Model):
"""AzureDatabaseReference.
:ivar table_name:
:vartype table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'table_name': {'key': 'tableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
table_name: Optional[str] = None,
sql_query: Optional[str] = None,
stored_procedure_name: Optional[str] = None,
stored_procedure_parameters: Optional[List["StoredProcedureParameter"]] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword table_name:
:paramtype table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDatabaseReference, self).__init__(**kwargs)
self.table_name = table_name
self.sql_query = sql_query
self.stored_procedure_name = stored_procedure_name
self.stored_procedure_parameters = stored_procedure_parameters
self.aml_data_store_name = aml_data_store_name
class AzureDataLakeGen2Reference(msrest.serialization.Model):
"""AzureDataLakeGen2Reference.
:ivar file_system_name:
:vartype file_system_name: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
file_system_name: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword file_system_name:
:paramtype file_system_name: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDataLakeGen2Reference, self).__init__(**kwargs)
self.file_system_name = file_system_name
self.uri = uri
self.account = account
self.relative_path = relative_path
self.aml_data_store_name = aml_data_store_name
class AzureDataLakeReference(msrest.serialization.Model):
"""AzureDataLakeReference.
:ivar tenant:
:vartype tenant: str
:ivar subscription:
:vartype subscription: str
:ivar resource_group:
:vartype resource_group: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'tenant': {'key': 'tenant', 'type': 'str'},
'subscription': {'key': 'subscription', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
tenant: Optional[str] = None,
subscription: Optional[str] = None,
resource_group: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword tenant:
:paramtype tenant: str
:keyword subscription:
:paramtype subscription: str
:keyword resource_group:
:paramtype resource_group: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureDataLakeReference, self).__init__(**kwargs)
self.tenant = tenant
self.subscription = subscription
self.resource_group = resource_group
self.uri = uri
self.account = account
self.relative_path = relative_path
self.aml_data_store_name = aml_data_store_name
class AzureFilesReference(msrest.serialization.Model):
"""AzureFilesReference.
:ivar share:
:vartype share: str
:ivar uri:
:vartype uri: str
:ivar account:
:vartype account: str
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'share': {'key': 'share', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'account': {'key': 'account', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
share: Optional[str] = None,
uri: Optional[str] = None,
account: Optional[str] = None,
relative_path: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword share:
:paramtype share: str
:keyword uri:
:paramtype uri: str
:keyword account:
:paramtype account: str
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(AzureFilesReference, self).__init__(**kwargs)
self.share = share
self.uri = uri
self.account = account
self.relative_path = relative_path
self.aml_data_store_name = aml_data_store_name
class AzureMLModuleVersionDescriptor(msrest.serialization.Model):
"""AzureMLModuleVersionDescriptor.
:ivar module_version_id:
:vartype module_version_id: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
module_version_id: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword module_version_id:
:paramtype module_version_id: str
:keyword version:
:paramtype version: str
"""
super(AzureMLModuleVersionDescriptor, self).__init__(**kwargs)
self.module_version_id = module_version_id
self.version = version
class AzureOpenAIDeploymentDto(msrest.serialization.Model):
"""AzureOpenAIDeploymentDto.
:ivar name:
:vartype name: str
:ivar model_name:
:vartype model_name: str
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'model_name': {'key': 'modelName', 'type': 'str'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
}
def __init__(
self,
*,
name: Optional[str] = None,
model_name: Optional[str] = None,
capabilities: Optional["AzureOpenAIModelCapabilities"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword model_name:
:paramtype model_name: str
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
"""
super(AzureOpenAIDeploymentDto, self).__init__(**kwargs)
self.name = name
self.model_name = model_name
self.capabilities = capabilities
class AzureOpenAIModelCapabilities(msrest.serialization.Model):
"""AzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'completion', 'type': 'bool'},
'chat_completion': {'key': 'chat_completion', 'type': 'bool'},
'embeddings': {'key': 'embeddings', 'type': 'bool'},
}
def __init__(
self,
*,
completion: Optional[bool] = None,
chat_completion: Optional[bool] = None,
embeddings: Optional[bool] = None,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(AzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = completion
self.chat_completion = chat_completion
self.embeddings = embeddings
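# Illustrative usage (hypothetical values), composing a deployment with its model
# capabilities:
#
#     deployment = AzureOpenAIDeploymentDto(
#         name="gpt-35-turbo",        # hypothetical deployment name
#         model_name="gpt-35-turbo",
#         capabilities=AzureOpenAIModelCapabilities(
#             completion=False,
#             chat_completion=True,
#             embeddings=False,
#         ),
#     )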
class BatchAiComputeInfo(msrest.serialization.Model):
"""BatchAiComputeInfo.
:ivar batch_ai_subscription_id:
:vartype batch_ai_subscription_id: str
:ivar batch_ai_resource_group:
:vartype batch_ai_resource_group: str
:ivar batch_ai_workspace_name:
:vartype batch_ai_workspace_name: str
:ivar cluster_name:
:vartype cluster_name: str
:ivar native_shared_directory:
:vartype native_shared_directory: str
"""
_attribute_map = {
'batch_ai_subscription_id': {'key': 'batchAiSubscriptionId', 'type': 'str'},
'batch_ai_resource_group': {'key': 'batchAiResourceGroup', 'type': 'str'},
'batch_ai_workspace_name': {'key': 'batchAiWorkspaceName', 'type': 'str'},
'cluster_name': {'key': 'clusterName', 'type': 'str'},
'native_shared_directory': {'key': 'nativeSharedDirectory', 'type': 'str'},
}
def __init__(
self,
*,
batch_ai_subscription_id: Optional[str] = None,
batch_ai_resource_group: Optional[str] = None,
batch_ai_workspace_name: Optional[str] = None,
cluster_name: Optional[str] = None,
native_shared_directory: Optional[str] = None,
**kwargs
):
"""
:keyword batch_ai_subscription_id:
:paramtype batch_ai_subscription_id: str
:keyword batch_ai_resource_group:
:paramtype batch_ai_resource_group: str
:keyword batch_ai_workspace_name:
:paramtype batch_ai_workspace_name: str
:keyword cluster_name:
:paramtype cluster_name: str
:keyword native_shared_directory:
:paramtype native_shared_directory: str
"""
super(BatchAiComputeInfo, self).__init__(**kwargs)
self.batch_ai_subscription_id = batch_ai_subscription_id
self.batch_ai_resource_group = batch_ai_resource_group
self.batch_ai_workspace_name = batch_ai_workspace_name
self.cluster_name = cluster_name
self.native_shared_directory = native_shared_directory
class BatchDataInput(msrest.serialization.Model):
"""BatchDataInput.
:ivar data_uri:
:vartype data_uri: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'data_uri': {'key': 'dataUri', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
data_uri: Optional[str] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword data_uri:
:paramtype data_uri: str
:keyword type:
:paramtype type: str
"""
super(BatchDataInput, self).__init__(**kwargs)
self.data_uri = data_uri
self.type = type
class BatchExportComponentSpecResponse(msrest.serialization.Model):
"""BatchExportComponentSpecResponse.
:ivar component_spec_meta_infos:
:vartype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
:ivar errors:
:vartype errors: list[~flow.models.ErrorResponse]
"""
_attribute_map = {
'component_spec_meta_infos': {'key': 'componentSpecMetaInfos', 'type': '[ComponentSpecMetaInfo]'},
'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
}
def __init__(
self,
*,
component_spec_meta_infos: Optional[List["ComponentSpecMetaInfo"]] = None,
errors: Optional[List["ErrorResponse"]] = None,
**kwargs
):
"""
:keyword component_spec_meta_infos:
:paramtype component_spec_meta_infos: list[~flow.models.ComponentSpecMetaInfo]
:keyword errors:
:paramtype errors: list[~flow.models.ErrorResponse]
"""
super(BatchExportComponentSpecResponse, self).__init__(**kwargs)
self.component_spec_meta_infos = component_spec_meta_infos
self.errors = errors
class BatchExportRawComponentResponse(msrest.serialization.Model):
"""BatchExportRawComponentResponse.
:ivar raw_component_dtos:
:vartype raw_component_dtos: list[~flow.models.RawComponentDto]
:ivar errors:
:vartype errors: list[~flow.models.ErrorResponse]
"""
_attribute_map = {
'raw_component_dtos': {'key': 'rawComponentDtos', 'type': '[RawComponentDto]'},
'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
}
def __init__(
self,
*,
raw_component_dtos: Optional[List["RawComponentDto"]] = None,
errors: Optional[List["ErrorResponse"]] = None,
**kwargs
):
"""
:keyword raw_component_dtos:
:paramtype raw_component_dtos: list[~flow.models.RawComponentDto]
:keyword errors:
:paramtype errors: list[~flow.models.ErrorResponse]
"""
super(BatchExportRawComponentResponse, self).__init__(**kwargs)
self.raw_component_dtos = raw_component_dtos
self.errors = errors
class BatchGetComponentHashesRequest(msrest.serialization.Model):
"""BatchGetComponentHashesRequest.
:ivar module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
:vartype module_hash_version: str or ~flow.models.AetherModuleHashVersion
:ivar module_entities: Dictionary of :code:`<AetherModuleEntity>`.
:vartype module_entities: dict[str, ~flow.models.AetherModuleEntity]
"""
_attribute_map = {
'module_hash_version': {'key': 'moduleHashVersion', 'type': 'str'},
'module_entities': {'key': 'moduleEntities', 'type': '{AetherModuleEntity}'},
}
def __init__(
self,
*,
module_hash_version: Optional[Union[str, "AetherModuleHashVersion"]] = None,
module_entities: Optional[Dict[str, "AetherModuleEntity"]] = None,
**kwargs
):
"""
:keyword module_hash_version: Possible values include: "IdentifierHash", "IdentifierHashV2".
:paramtype module_hash_version: str or ~flow.models.AetherModuleHashVersion
:keyword module_entities: Dictionary of :code:`<AetherModuleEntity>`.
:paramtype module_entities: dict[str, ~flow.models.AetherModuleEntity]
"""
super(BatchGetComponentHashesRequest, self).__init__(**kwargs)
self.module_hash_version = module_hash_version
self.module_entities = module_entities
class BatchGetComponentRequest(msrest.serialization.Model):
"""BatchGetComponentRequest.
:ivar version_ids:
:vartype version_ids: list[str]
:ivar name_and_versions:
:vartype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
"""
_attribute_map = {
'version_ids': {'key': 'versionIds', 'type': '[str]'},
'name_and_versions': {'key': 'nameAndVersions', 'type': '[ComponentNameMetaInfo]'},
}
def __init__(
self,
*,
version_ids: Optional[List[str]] = None,
name_and_versions: Optional[List["ComponentNameMetaInfo"]] = None,
**kwargs
):
"""
:keyword version_ids:
:paramtype version_ids: list[str]
:keyword name_and_versions:
:paramtype name_and_versions: list[~flow.models.ComponentNameMetaInfo]
"""
super(BatchGetComponentRequest, self).__init__(**kwargs)
self.version_ids = version_ids
self.name_and_versions = name_and_versions
class Binding(msrest.serialization.Model):
"""Binding.
:ivar binding_type: The only acceptable values to pass in are None and "Basic". The default
value is None.
:vartype binding_type: str
"""
_attribute_map = {
'binding_type': {'key': 'bindingType', 'type': 'str'},
}
def __init__(
self,
*,
binding_type: Optional[str] = None,
**kwargs
):
"""
:keyword binding_type: The only acceptable values to pass in are None and "Basic". The default
value is None.
:paramtype binding_type: str
"""
super(Binding, self).__init__(**kwargs)
self.binding_type = binding_type
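# Illustrative usage. msrest omits attributes left as None, so the constrained
# ``binding_type`` only appears on the wire when explicitly set:
#
#     Binding(binding_type="Basic").serialize()   # -> {'bindingType': 'Basic'}
#     Binding().serialize()                       # -> {}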
class BulkTestDto(msrest.serialization.Model):
"""BulkTestDto.
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar runtime:
:vartype runtime: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar evaluation_count:
:vartype evaluation_count: int
:ivar variant_count:
:vartype variant_count: int
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
"""
_attribute_map = {
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'runtime': {'key': 'runtime', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'evaluation_count': {'key': 'evaluationCount', 'type': 'int'},
'variant_count': {'key': 'variantCount', 'type': 'int'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
}
def __init__(
self,
*,
bulk_test_id: Optional[str] = None,
display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
runtime: Optional[str] = None,
created_by: Optional["SchemaContractsCreatedBy"] = None,
created_on: Optional[datetime.datetime] = None,
evaluation_count: Optional[int] = None,
variant_count: Optional[int] = None,
flow_submit_run_settings: Optional["FlowSubmitRunSettings"] = None,
inputs: Optional[Dict[str, "FlowInputDefinition"]] = None,
outputs: Optional[Dict[str, "FlowOutputDefinition"]] = None,
batch_inputs: Optional[List[Dict[str, Any]]] = None,
batch_data_input: Optional["BatchDataInput"] = None,
**kwargs
):
"""
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword runtime:
:paramtype runtime: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword evaluation_count:
:paramtype evaluation_count: int
:keyword variant_count:
:paramtype variant_count: int
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
"""
super(BulkTestDto, self).__init__(**kwargs)
self.bulk_test_id = bulk_test_id
self.display_name = display_name
self.description = description
self.tags = tags
self.runtime = runtime
self.created_by = created_by
self.created_on = created_on
self.evaluation_count = evaluation_count
self.variant_count = variant_count
self.flow_submit_run_settings = flow_submit_run_settings
self.inputs = inputs
self.outputs = outputs
self.batch_inputs = batch_inputs
self.batch_data_input = batch_data_input
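# Illustrative usage (hypothetical values). ``batch_inputs`` is a list of free-form
# row dictionaries; note its wire key is snake_case ('batch_inputs') per the
# contract, unlike the camelCase keys of its sibling fields:
#
#     bulk_test = BulkTestDto(
#         bulk_test_id="bt-001",        # hypothetical id
#         display_name="regression suite",
#         tags={"team": "qa"},
#         batch_inputs=[{"question": "What is promptflow?"}],
#     )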
class CloudError(msrest.serialization.Model):
"""CloudError.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
:ivar details:
:vartype details: list[~flow.models.CloudError]
:ivar additional_info:
:vartype additional_info: list[~flow.models.AdditionalErrorInfo]
"""
_validation = {
'details': {'readonly': True},
'additional_info': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudError]'},
'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
target: Optional[str] = None,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
"""
super(CloudError, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
self.details = None
self.additional_info = None
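# Usage sketch (illustrative only). 'details' and 'additional_info' are marked
# readonly in _validation, so the constructor leaves them as None and they are
# only populated when the model is deserialized from a service response:
#
#     err = CloudError(code="NotFound", message="Flow not found")
#     err.details          # None until filled in from a response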
class CloudPrioritySetting(msrest.serialization.Model):
"""CloudPrioritySetting.
:ivar scope_priority:
:vartype scope_priority: ~flow.models.PriorityConfiguration
:ivar aml_compute_priority:
:vartype aml_compute_priority: ~flow.models.PriorityConfiguration
:ivar itp_priority:
:vartype itp_priority: ~flow.models.PriorityConfiguration
:ivar singularity_priority:
:vartype singularity_priority: ~flow.models.PriorityConfiguration
"""
_attribute_map = {
'scope_priority': {'key': 'scopePriority', 'type': 'PriorityConfiguration'},
'aml_compute_priority': {'key': 'AmlComputePriority', 'type': 'PriorityConfiguration'},
'itp_priority': {'key': 'ItpPriority', 'type': 'PriorityConfiguration'},
'singularity_priority': {'key': 'SingularityPriority', 'type': 'PriorityConfiguration'},
}
def __init__(
self,
*,
scope_priority: Optional["PriorityConfiguration"] = None,
aml_compute_priority: Optional["PriorityConfiguration"] = None,
itp_priority: Optional["PriorityConfiguration"] = None,
singularity_priority: Optional["PriorityConfiguration"] = None,
**kwargs
):
"""
:keyword scope_priority:
:paramtype scope_priority: ~flow.models.PriorityConfiguration
:keyword aml_compute_priority:
:paramtype aml_compute_priority: ~flow.models.PriorityConfiguration
:keyword itp_priority:
:paramtype itp_priority: ~flow.models.PriorityConfiguration
:keyword singularity_priority:
:paramtype singularity_priority: ~flow.models.PriorityConfiguration
"""
super(CloudPrioritySetting, self).__init__(**kwargs)
self.scope_priority = scope_priority
self.aml_compute_priority = aml_compute_priority
self.itp_priority = itp_priority
self.singularity_priority = singularity_priority
class CloudSettings(msrest.serialization.Model):
"""CloudSettings.
:ivar linked_settings:
:vartype linked_settings: list[~flow.models.ParameterAssignment]
:ivar priority_config:
:vartype priority_config: ~flow.models.PriorityConfiguration
:ivar hdi_run_config:
:vartype hdi_run_config: ~flow.models.HdiRunConfiguration
:ivar sub_graph_config:
:vartype sub_graph_config: ~flow.models.SubGraphConfiguration
:ivar auto_ml_component_config:
:vartype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
:ivar ap_cloud_config:
:vartype ap_cloud_config: ~flow.models.APCloudConfiguration
:ivar scope_cloud_config:
:vartype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
:ivar es_cloud_config:
:vartype es_cloud_config: ~flow.models.EsCloudConfiguration
:ivar data_transfer_cloud_config:
:vartype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
:ivar aml_spark_cloud_setting:
:vartype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
:ivar data_transfer_v2_cloud_setting:
:vartype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
"""
_attribute_map = {
'linked_settings': {'key': 'linkedSettings', 'type': '[ParameterAssignment]'},
'priority_config': {'key': 'priorityConfig', 'type': 'PriorityConfiguration'},
'hdi_run_config': {'key': 'hdiRunConfig', 'type': 'HdiRunConfiguration'},
'sub_graph_config': {'key': 'subGraphConfig', 'type': 'SubGraphConfiguration'},
'auto_ml_component_config': {'key': 'autoMLComponentConfig', 'type': 'AutoMLComponentConfiguration'},
'ap_cloud_config': {'key': 'apCloudConfig', 'type': 'APCloudConfiguration'},
'scope_cloud_config': {'key': 'scopeCloudConfig', 'type': 'ScopeCloudConfiguration'},
'es_cloud_config': {'key': 'esCloudConfig', 'type': 'EsCloudConfiguration'},
'data_transfer_cloud_config': {'key': 'dataTransferCloudConfig', 'type': 'DataTransferCloudConfiguration'},
'aml_spark_cloud_setting': {'key': 'amlSparkCloudSetting', 'type': 'AmlSparkCloudSetting'},
'data_transfer_v2_cloud_setting': {'key': 'dataTransferV2CloudSetting', 'type': 'DataTransferV2CloudSetting'},
}
def __init__(
self,
*,
linked_settings: Optional[List["ParameterAssignment"]] = None,
priority_config: Optional["PriorityConfiguration"] = None,
hdi_run_config: Optional["HdiRunConfiguration"] = None,
sub_graph_config: Optional["SubGraphConfiguration"] = None,
auto_ml_component_config: Optional["AutoMLComponentConfiguration"] = None,
ap_cloud_config: Optional["APCloudConfiguration"] = None,
scope_cloud_config: Optional["ScopeCloudConfiguration"] = None,
es_cloud_config: Optional["EsCloudConfiguration"] = None,
data_transfer_cloud_config: Optional["DataTransferCloudConfiguration"] = None,
aml_spark_cloud_setting: Optional["AmlSparkCloudSetting"] = None,
data_transfer_v2_cloud_setting: Optional["DataTransferV2CloudSetting"] = None,
**kwargs
):
"""
:keyword linked_settings:
:paramtype linked_settings: list[~flow.models.ParameterAssignment]
:keyword priority_config:
:paramtype priority_config: ~flow.models.PriorityConfiguration
:keyword hdi_run_config:
:paramtype hdi_run_config: ~flow.models.HdiRunConfiguration
:keyword sub_graph_config:
:paramtype sub_graph_config: ~flow.models.SubGraphConfiguration
:keyword auto_ml_component_config:
:paramtype auto_ml_component_config: ~flow.models.AutoMLComponentConfiguration
:keyword ap_cloud_config:
:paramtype ap_cloud_config: ~flow.models.APCloudConfiguration
:keyword scope_cloud_config:
:paramtype scope_cloud_config: ~flow.models.ScopeCloudConfiguration
:keyword es_cloud_config:
:paramtype es_cloud_config: ~flow.models.EsCloudConfiguration
:keyword data_transfer_cloud_config:
:paramtype data_transfer_cloud_config: ~flow.models.DataTransferCloudConfiguration
:keyword aml_spark_cloud_setting:
:paramtype aml_spark_cloud_setting: ~flow.models.AmlSparkCloudSetting
:keyword data_transfer_v2_cloud_setting:
:paramtype data_transfer_v2_cloud_setting: ~flow.models.DataTransferV2CloudSetting
"""
super(CloudSettings, self).__init__(**kwargs)
self.linked_settings = linked_settings
self.priority_config = priority_config
self.hdi_run_config = hdi_run_config
self.sub_graph_config = sub_graph_config
self.auto_ml_component_config = auto_ml_component_config
self.ap_cloud_config = ap_cloud_config
self.scope_cloud_config = scope_cloud_config
self.es_cloud_config = es_cloud_config
self.data_transfer_cloud_config = data_transfer_cloud_config
self.aml_spark_cloud_setting = aml_spark_cloud_setting
self.data_transfer_v2_cloud_setting = data_transfer_v2_cloud_setting
class ColumnTransformer(msrest.serialization.Model):
"""ColumnTransformer.
:ivar fields:
:vartype fields: list[str]
:ivar parameters: Anything.
:vartype parameters: any
"""
_attribute_map = {
'fields': {'key': 'fields', 'type': '[str]'},
'parameters': {'key': 'parameters', 'type': 'object'},
}
def __init__(
self,
*,
fields: Optional[List[str]] = None,
parameters: Optional[Any] = None,
**kwargs
):
"""
:keyword fields:
:paramtype fields: list[str]
:keyword parameters: Anything.
:paramtype parameters: any
"""
super(ColumnTransformer, self).__init__(**kwargs)
self.fields = fields
self.parameters = parameters
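# Usage sketch (illustrative only). 'parameters' is typed 'object', so any
# JSON-serializable value is accepted; the dict below is hypothetical:
#
#     ct = ColumnTransformer(fields=["price"], parameters={"strategy": "mean"})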
class CommandJob(msrest.serialization.Model):
"""CommandJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar code_id:
:vartype code_id: str
:ivar command:
:vartype command: str
:ivar environment_id:
:vartype environment_id: str
:ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar distribution:
:vartype distribution: ~flow.models.DistributionConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar autologger_settings:
:vartype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
:ivar limits:
:vartype limits: ~flow.models.CommandJobLimits
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_validation = {
'command': {'min_length': 1},
}
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'code_id': {'key': 'codeId', 'type': 'str'},
'command': {'key': 'command', 'type': 'str'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'autologger_settings': {'key': 'autologgerSettings', 'type': 'MfeInternalAutologgerSettings'},
'limits': {'key': 'limits', 'type': 'CommandJobLimits'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
job_type: Optional[Union[str, "JobType"]] = None,
code_id: Optional[str] = None,
command: Optional[str] = None,
environment_id: Optional[str] = None,
input_data_bindings: Optional[Dict[str, "InputDataBinding"]] = None,
output_data_bindings: Optional[Dict[str, "OutputDataBinding"]] = None,
distribution: Optional["DistributionConfiguration"] = None,
environment_variables: Optional[Dict[str, str]] = None,
parameters: Optional[Dict[str, Any]] = None,
autologger_settings: Optional["MfeInternalAutologgerSettings"] = None,
limits: Optional["CommandJobLimits"] = None,
provisioning_state: Optional[Union[str, "JobProvisioningState"]] = None,
parent_job_name: Optional[str] = None,
display_name: Optional[str] = None,
experiment_name: Optional[str] = None,
status: Optional[Union[str, "JobStatus"]] = None,
interaction_endpoints: Optional[Dict[str, "JobEndpoint"]] = None,
identity: Optional["MfeInternalIdentityConfiguration"] = None,
compute: Optional["ComputeConfiguration"] = None,
priority: Optional[int] = None,
output: Optional["JobOutputArtifacts"] = None,
is_archived: Optional[bool] = None,
schedule: Optional["ScheduleBase"] = None,
component_id: Optional[str] = None,
notification_setting: Optional["NotificationSetting"] = None,
secrets_configuration: Optional[Dict[str, "MfeInternalSecretConfiguration"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword code_id:
:paramtype code_id: str
:keyword command:
:paramtype command: str
:keyword environment_id:
:paramtype environment_id: str
:keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword distribution:
:paramtype distribution: ~flow.models.DistributionConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword autologger_settings:
:paramtype autologger_settings: ~flow.models.MfeInternalAutologgerSettings
:keyword limits:
:paramtype limits: ~flow.models.CommandJobLimits
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(CommandJob, self).__init__(**kwargs)
self.job_type = job_type
self.code_id = code_id
self.command = command
self.environment_id = environment_id
self.input_data_bindings = input_data_bindings
self.output_data_bindings = output_data_bindings
self.distribution = distribution
self.environment_variables = environment_variables
self.parameters = parameters
self.autologger_settings = autologger_settings
self.limits = limits
self.provisioning_state = provisioning_state
self.parent_job_name = parent_job_name
self.display_name = display_name
self.experiment_name = experiment_name
self.status = status
self.interaction_endpoints = interaction_endpoints
self.identity = identity
self.compute = compute
self.priority = priority
self.output = output
self.is_archived = is_archived
self.schedule = schedule
self.component_id = component_id
self.notification_setting = notification_setting
self.secrets_configuration = secrets_configuration
self.description = description
self.tags = tags
self.properties = properties
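# Usage sketch (illustrative only; the ids and the ISO-8601 timeout value are
# hypothetical). Enum-typed fields such as job_type accept either the string
# literal or the corresponding ~flow.models enum member:
#
#     job = CommandJob(
#         job_type="Command",
#         command="python train.py",
#         environment_id="azureml:my-env:1",
#         environment_variables={"LOG_LEVEL": "info"},
#         limits=CommandJobLimits(job_limits_type="Command", timeout="PT1H"),
#     )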
class CommandJobLimits(msrest.serialization.Model):
"""CommandJobLimits.
:ivar job_limits_type: Possible values include: "Command", "Sweep".
:vartype job_limits_type: str or ~flow.models.JobLimitsType
:ivar timeout:
:vartype timeout: str
"""
_attribute_map = {
'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'},
'timeout': {'key': 'timeout', 'type': 'str'},
}
def __init__(
self,
*,
job_limits_type: Optional[Union[str, "JobLimitsType"]] = None,
timeout: Optional[str] = None,
**kwargs
):
"""
:keyword job_limits_type: Possible values include: "Command", "Sweep".
:paramtype job_limits_type: str or ~flow.models.JobLimitsType
:keyword timeout:
:paramtype timeout: str
"""
super(CommandJobLimits, self).__init__(**kwargs)
self.job_limits_type = job_limits_type
self.timeout = timeout
class CommandReturnCodeConfig(msrest.serialization.Model):
"""CommandReturnCodeConfig.
:ivar return_code: Possible values include: "Zero", "ZeroOrGreater".
:vartype return_code: str or ~flow.models.SuccessfulCommandReturnCode
:ivar successful_return_codes:
:vartype successful_return_codes: list[int]
"""
_attribute_map = {
'return_code': {'key': 'returnCode', 'type': 'str'},
'successful_return_codes': {'key': 'successfulReturnCodes', 'type': '[int]'},
}
def __init__(
self,
*,
return_code: Optional[Union[str, "SuccessfulCommandReturnCode"]] = None,
successful_return_codes: Optional[List[int]] = None,
**kwargs
):
"""
:keyword return_code: Possible values include: "Zero", "ZeroOrGreater".
:paramtype return_code: str or ~flow.models.SuccessfulCommandReturnCode
:keyword successful_return_codes:
:paramtype successful_return_codes: list[int]
"""
super(CommandReturnCodeConfig, self).__init__(**kwargs)
self.return_code = return_code
self.successful_return_codes = successful_return_codes
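# Usage sketch (illustrative only). Success can be expressed either through the
# return_code policy enum or by enumerating explicit exit codes:
#
#     cfg = CommandReturnCodeConfig(return_code="ZeroOrGreater")
#     cfg = CommandReturnCodeConfig(successful_return_codes=[0, 2])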
class ComponentConfiguration(msrest.serialization.Model):
"""ComponentConfiguration.
:ivar component_identifier:
:vartype component_identifier: str
"""
_attribute_map = {
'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
}
def __init__(
self,
*,
component_identifier: Optional[str] = None,
**kwargs
):
"""
:keyword component_identifier:
:paramtype component_identifier: str
"""
super(ComponentConfiguration, self).__init__(**kwargs)
self.component_identifier = component_identifier
class ComponentInput(msrest.serialization.Model):
"""ComponentInput.
:ivar name:
:vartype name: str
:ivar optional:
:vartype optional: bool
:ivar description:
:vartype description: str
:ivar type:
:vartype type: str
:ivar default:
:vartype default: str
:ivar enum:
:vartype enum: list[str]
:ivar min:
:vartype min: str
:ivar max:
:vartype max: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'optional': {'key': 'optional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default': {'key': 'default', 'type': 'str'},
'enum': {'key': 'enum', 'type': '[str]'},
'min': {'key': 'min', 'type': 'str'},
'max': {'key': 'max', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
optional: Optional[bool] = None,
description: Optional[str] = None,
type: Optional[str] = None,
default: Optional[str] = None,
enum: Optional[List[str]] = None,
min: Optional[str] = None,
max: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword optional:
:paramtype optional: bool
:keyword description:
:paramtype description: str
:keyword type:
:paramtype type: str
:keyword default:
:paramtype default: str
:keyword enum:
:paramtype enum: list[str]
:keyword min:
:paramtype min: str
:keyword max:
:paramtype max: str
"""
super(ComponentInput, self).__init__(**kwargs)
self.name = name
self.optional = optional
self.description = description
self.type = type
self.default = default
self.enum = enum
self.min = min
self.max = max
class ComponentJob(msrest.serialization.Model):
"""ComponentJob.
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar component_id:
:vartype component_id: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.ComponentJobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.ComponentJobOutput]
"""
_attribute_map = {
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'component_id': {'key': 'componentId', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{ComponentJobInput}'},
'outputs': {'key': 'outputs', 'type': '{ComponentJobOutput}'},
}
def __init__(
self,
*,
compute: Optional["ComputeConfiguration"] = None,
component_id: Optional[str] = None,
inputs: Optional[Dict[str, "ComponentJobInput"]] = None,
outputs: Optional[Dict[str, "ComponentJobOutput"]] = None,
**kwargs
):
"""
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword component_id:
:paramtype component_id: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.ComponentJobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.ComponentJobOutput]
"""
super(ComponentJob, self).__init__(**kwargs)
self.compute = compute
self.component_id = component_id
self.inputs = inputs
self.outputs = outputs
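# Usage sketch (illustrative only; the component id and binding expression are
# hypothetical). Inputs and outputs are dictionaries keyed by port name:
#
#     cj = ComponentJob(
#         component_id="azureml:my-component:2",
#         inputs={"training_data": ComponentJobInput(input_binding="${{inputs.data}}")},
#     )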
class ComponentJobInput(msrest.serialization.Model):
"""ComponentJobInput.
:ivar data:
:vartype data: ~flow.models.InputData
:ivar input_binding:
:vartype input_binding: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'InputData'},
'input_binding': {'key': 'inputBinding', 'type': 'str'},
}
def __init__(
self,
*,
data: Optional["InputData"] = None,
input_binding: Optional[str] = None,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.InputData
:keyword input_binding:
:paramtype input_binding: str
"""
super(ComponentJobInput, self).__init__(**kwargs)
self.data = data
self.input_binding = input_binding
class ComponentJobOutput(msrest.serialization.Model):
"""ComponentJobOutput.
:ivar data:
:vartype data: ~flow.models.MfeInternalOutputData
:ivar output_binding:
:vartype output_binding: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
'output_binding': {'key': 'outputBinding', 'type': 'str'},
}
def __init__(
self,
*,
data: Optional["MfeInternalOutputData"] = None,
output_binding: Optional[str] = None,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.MfeInternalOutputData
:keyword output_binding:
:paramtype output_binding: str
"""
super(ComponentJobOutput, self).__init__(**kwargs)
self.data = data
self.output_binding = output_binding
class ComponentNameAndDefaultVersion(msrest.serialization.Model):
"""ComponentNameAndDefaultVersion.
:ivar component_name:
:vartype component_name: str
:ivar version:
:vartype version: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
"""
_attribute_map = {
'component_name': {'key': 'componentName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
}
def __init__(
self,
*,
component_name: Optional[str] = None,
version: Optional[str] = None,
feed_name: Optional[str] = None,
registry_name: Optional[str] = None,
**kwargs
):
"""
:keyword component_name:
:paramtype component_name: str
:keyword version:
:paramtype version: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
"""
super(ComponentNameAndDefaultVersion, self).__init__(**kwargs)
self.component_name = component_name
self.version = version
self.feed_name = feed_name
self.registry_name = registry_name
class ComponentNameMetaInfo(msrest.serialization.Model):
"""ComponentNameMetaInfo.
:ivar feed_name:
:vartype feed_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar registry_name:
:vartype registry_name: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
}
def __init__(
self,
*,
feed_name: Optional[str] = None,
component_name: Optional[str] = None,
component_version: Optional[str] = None,
registry_name: Optional[str] = None,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword registry_name:
:paramtype registry_name: str
"""
super(ComponentNameMetaInfo, self).__init__(**kwargs)
self.feed_name = feed_name
self.component_name = component_name
self.component_version = component_version
self.registry_name = registry_name
class ComponentOutput(msrest.serialization.Model):
"""ComponentOutput.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword type:
:paramtype type: str
"""
super(ComponentOutput, self).__init__(**kwargs)
self.name = name
self.description = description
self.type = type
class ComponentPreflightResult(msrest.serialization.Model):
"""ComponentPreflightResult.
:ivar error_details:
:vartype error_details: list[~flow.models.RootError]
"""
_attribute_map = {
'error_details': {'key': 'errorDetails', 'type': '[RootError]'},
}
def __init__(
self,
*,
error_details: Optional[List["RootError"]] = None,
**kwargs
):
"""
:keyword error_details:
:paramtype error_details: list[~flow.models.RootError]
"""
super(ComponentPreflightResult, self).__init__(**kwargs)
self.error_details = error_details
class ComponentSpecMetaInfo(msrest.serialization.Model):
"""ComponentSpecMetaInfo.
:ivar component_spec: Anything.
:vartype component_spec: any
:ivar component_version:
:vartype component_version: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar component_name:
:vartype component_name: str
:ivar description:
:vartype description: str
:ivar is_archived:
:vartype is_archived: bool
"""
_attribute_map = {
'component_spec': {'key': 'componentSpec', 'type': 'object'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'tags': {'key': 'tags', 'type': '{str}'},
'component_name': {'key': 'componentName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
}
def __init__(
self,
*,
component_spec: Optional[Any] = None,
component_version: Optional[str] = None,
is_anonymous: Optional[bool] = None,
properties: Optional[Dict[str, str]] = None,
tags: Optional[Dict[str, str]] = None,
component_name: Optional[str] = None,
description: Optional[str] = None,
is_archived: Optional[bool] = None,
**kwargs
):
"""
:keyword component_spec: Anything.
:paramtype component_spec: any
:keyword component_version:
:paramtype component_version: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword component_name:
:paramtype component_name: str
:keyword description:
:paramtype description: str
:keyword is_archived:
:paramtype is_archived: bool
"""
super(ComponentSpecMetaInfo, self).__init__(**kwargs)
self.component_spec = component_spec
self.component_version = component_version
self.is_anonymous = is_anonymous
self.properties = properties
self.tags = tags
self.component_name = component_name
self.description = description
self.is_archived = is_archived
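# Usage sketch (illustrative only). Like every msrest model in this module,
# instances round-trip through the wire format declared in _attribute_map:
#
#     meta = ComponentSpecMetaInfo(component_name="parse", is_anonymous=True)
#     payload = meta.serialize()                       # {'componentName': 'parse', ...}
#     again = ComponentSpecMetaInfo.deserialize(payload)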
class ComponentUpdateRequest(msrest.serialization.Model):
"""ComponentUpdateRequest.
:ivar original_module_entity:
:vartype original_module_entity: ~flow.models.ModuleEntity
:ivar update_module_entity:
:vartype update_module_entity: ~flow.models.ModuleEntity
:ivar module_name:
:vartype module_name: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar overwrite_with_original_name_and_version:
:vartype overwrite_with_original_name_and_version: bool
:ivar snapshot_id:
:vartype snapshot_id: str
"""
_attribute_map = {
'original_module_entity': {'key': 'originalModuleEntity', 'type': 'ModuleEntity'},
'update_module_entity': {'key': 'updateModuleEntity', 'type': 'ModuleEntity'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'overwrite_with_original_name_and_version': {'key': 'overwriteWithOriginalNameAndVersion', 'type': 'bool'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
}
def __init__(
self,
*,
original_module_entity: Optional["ModuleEntity"] = None,
update_module_entity: Optional["ModuleEntity"] = None,
module_name: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
overwrite_with_original_name_and_version: Optional[bool] = None,
snapshot_id: Optional[str] = None,
**kwargs
):
"""
:keyword original_module_entity:
:paramtype original_module_entity: ~flow.models.ModuleEntity
:keyword update_module_entity:
:paramtype update_module_entity: ~flow.models.ModuleEntity
:keyword module_name:
:paramtype module_name: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword overwrite_with_original_name_and_version:
:paramtype overwrite_with_original_name_and_version: bool
:keyword snapshot_id:
:paramtype snapshot_id: str
"""
super(ComponentUpdateRequest, self).__init__(**kwargs)
self.original_module_entity = original_module_entity
self.update_module_entity = update_module_entity
self.module_name = module_name
self.properties = properties
self.overwrite_with_original_name_and_version = overwrite_with_original_name_and_version
self.snapshot_id = snapshot_id
class ComponentValidationRequest(msrest.serialization.Model):
"""ComponentValidationRequest.
:ivar component_identifier:
:vartype component_identifier: str
:ivar compute_identity:
:vartype compute_identity: ~flow.models.ComputeIdentityDto
:ivar execution_context_dto:
:vartype execution_context_dto: ~flow.models.ExecutionContextDto
:ivar environment_definition:
:vartype environment_definition: ~flow.models.EnvironmentDefinitionDto
:ivar data_port_dtos:
:vartype data_port_dtos: list[~flow.models.DataPortDto]
"""
_attribute_map = {
'component_identifier': {'key': 'componentIdentifier', 'type': 'str'},
'compute_identity': {'key': 'computeIdentity', 'type': 'ComputeIdentityDto'},
'execution_context_dto': {'key': 'executionContextDto', 'type': 'ExecutionContextDto'},
'environment_definition': {'key': 'environmentDefinition', 'type': 'EnvironmentDefinitionDto'},
'data_port_dtos': {'key': 'dataPortDtos', 'type': '[DataPortDto]'},
}
def __init__(
self,
*,
component_identifier: Optional[str] = None,
compute_identity: Optional["ComputeIdentityDto"] = None,
execution_context_dto: Optional["ExecutionContextDto"] = None,
environment_definition: Optional["EnvironmentDefinitionDto"] = None,
data_port_dtos: Optional[List["DataPortDto"]] = None,
**kwargs
):
"""
:keyword component_identifier:
:paramtype component_identifier: str
:keyword compute_identity:
:paramtype compute_identity: ~flow.models.ComputeIdentityDto
:keyword execution_context_dto:
:paramtype execution_context_dto: ~flow.models.ExecutionContextDto
:keyword environment_definition:
:paramtype environment_definition: ~flow.models.EnvironmentDefinitionDto
:keyword data_port_dtos:
:paramtype data_port_dtos: list[~flow.models.DataPortDto]
"""
super(ComponentValidationRequest, self).__init__(**kwargs)
self.component_identifier = component_identifier
self.compute_identity = compute_identity
self.execution_context_dto = execution_context_dto
self.environment_definition = environment_definition
self.data_port_dtos = data_port_dtos
class ComponentValidationResponse(msrest.serialization.Model):
"""ComponentValidationResponse.
:ivar status: Possible values include: "Succeeded", "Failed".
:vartype status: str or ~flow.models.ValidationStatus
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
*,
status: Optional[Union[str, "ValidationStatus"]] = None,
error: Optional["ErrorResponse"] = None,
**kwargs
):
"""
:keyword status: Possible values include: "Succeeded", "Failed".
:paramtype status: str or ~flow.models.ValidationStatus
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
"""
super(ComponentValidationResponse, self).__init__(**kwargs)
self.status = status
self.error = error
class Compute(msrest.serialization.Model):
"""Compute.
:ivar target:
:vartype target: str
:ivar target_type:
:vartype target_type: str
:ivar vm_size:
:vartype vm_size: str
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar gpu_count:
:vartype gpu_count: int
:ivar priority:
:vartype priority: str
:ivar region:
:vartype region: str
:ivar arm_id:
:vartype arm_id: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'target_type': {'key': 'targetType', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'priority': {'key': 'priority', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
target: Optional[str] = None,
target_type: Optional[str] = None,
vm_size: Optional[str] = None,
instance_type: Optional[str] = None,
instance_count: Optional[int] = None,
gpu_count: Optional[int] = None,
priority: Optional[str] = None,
region: Optional[str] = None,
arm_id: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword target_type:
:paramtype target_type: str
:keyword vm_size:
:paramtype vm_size: str
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword gpu_count:
:paramtype gpu_count: int
:keyword priority:
:paramtype priority: str
:keyword region:
:paramtype region: str
:keyword arm_id:
:paramtype arm_id: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(Compute, self).__init__(**kwargs)
self.target = target
self.target_type = target_type
self.vm_size = vm_size
self.instance_type = instance_type
self.instance_count = instance_count
self.gpu_count = gpu_count
self.priority = priority
self.region = region
self.arm_id = arm_id
self.properties = properties
class ComputeConfiguration(msrest.serialization.Model):
"""ComputeConfiguration.
:ivar target:
:vartype target: str
:ivar instance_count:
:vartype instance_count: int
:ivar max_instance_count:
:vartype max_instance_count: int
:ivar is_local:
:vartype is_local: bool
:ivar location:
:vartype location: str
:ivar is_clusterless:
:vartype is_clusterless: bool
:ivar instance_type:
:vartype instance_type: str
:ivar instance_priority:
:vartype instance_priority: str
:ivar job_priority:
:vartype job_priority: int
:ivar shm_size:
:vartype shm_size: str
:ivar docker_args:
:vartype docker_args: str
:ivar locations:
:vartype locations: list[str]
:ivar properties: Dictionary of :code:`<any>`.
:vartype properties: dict[str, any]
"""
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'},
'is_local': {'key': 'isLocal', 'type': 'bool'},
'location': {'key': 'location', 'type': 'str'},
'is_clusterless': {'key': 'isClusterless', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_priority': {'key': 'instancePriority', 'type': 'str'},
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'docker_args': {'key': 'dockerArgs', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'properties': {'key': 'properties', 'type': '{object}'},
}
def __init__(
self,
*,
target: Optional[str] = None,
instance_count: Optional[int] = None,
max_instance_count: Optional[int] = None,
is_local: Optional[bool] = None,
location: Optional[str] = None,
is_clusterless: Optional[bool] = None,
instance_type: Optional[str] = None,
instance_priority: Optional[str] = None,
job_priority: Optional[int] = None,
shm_size: Optional[str] = None,
docker_args: Optional[str] = None,
locations: Optional[List[str]] = None,
properties: Optional[Dict[str, Any]] = None,
**kwargs
):
"""
:keyword target:
:paramtype target: str
:keyword instance_count:
:paramtype instance_count: int
:keyword max_instance_count:
:paramtype max_instance_count: int
:keyword is_local:
:paramtype is_local: bool
:keyword location:
:paramtype location: str
:keyword is_clusterless:
:paramtype is_clusterless: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_priority:
:paramtype instance_priority: str
:keyword job_priority:
:paramtype job_priority: int
:keyword shm_size:
:paramtype shm_size: str
:keyword docker_args:
:paramtype docker_args: str
:keyword locations:
:paramtype locations: list[str]
:keyword properties: Dictionary of :code:`<any>`.
:paramtype properties: dict[str, any]
"""
super(ComputeConfiguration, self).__init__(**kwargs)
self.target = target
self.instance_count = instance_count
self.max_instance_count = max_instance_count
self.is_local = is_local
self.location = location
self.is_clusterless = is_clusterless
self.instance_type = instance_type
self.instance_priority = instance_priority
self.job_priority = job_priority
self.shm_size = shm_size
self.docker_args = docker_args
self.locations = locations
self.properties = properties
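# Usage sketch (illustrative only; the target name is hypothetical).
# 'properties' is a free-form dict[str, any] bag:
#
#     compute = ComputeConfiguration(
#         target="cpu-cluster",
#         instance_count=2,
#         properties={"retries": 3},
#     )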
class ComputeContract(msrest.serialization.Model):
"""ComputeContract.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar location:
:vartype location: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar identity:
:vartype identity: ~flow.models.ComputeIdentityContract
:ivar properties:
:vartype properties: ~flow.models.ComputeProperties
"""
_validation = {
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'ComputeIdentityContract'},
'properties': {'key': 'properties', 'type': 'ComputeProperties'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
location: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
identity: Optional["ComputeIdentityContract"] = None,
properties: Optional["ComputeProperties"] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword location:
:paramtype location: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword identity:
:paramtype identity: ~flow.models.ComputeIdentityContract
:keyword properties:
:paramtype properties: ~flow.models.ComputeProperties
"""
super(ComputeContract, self).__init__(**kwargs)
self.id = id
self.name = name
self.type = None
self.location = location
self.tags = tags
self.identity = identity
self.properties = properties
class ComputeIdentityContract(msrest.serialization.Model):
"""ComputeIdentityContract.
:ivar type:
:vartype type: str
:ivar system_identity_url:
:vartype system_identity_url: str
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar client_id:
:vartype client_id: str
:ivar client_secret_url:
:vartype client_secret_url: str
:ivar user_assigned_identities: This is a dictionary.
:vartype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'system_identity_url': {'key': 'systemIdentityUrl', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComputeRPUserAssignedIdentity}'},
}
def __init__(
self,
*,
type: Optional[str] = None,
system_identity_url: Optional[str] = None,
principal_id: Optional[str] = None,
tenant_id: Optional[str] = None,
client_id: Optional[str] = None,
client_secret_url: Optional[str] = None,
user_assigned_identities: Optional[Dict[str, "ComputeRPUserAssignedIdentity"]] = None,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword system_identity_url:
:paramtype system_identity_url: str
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword client_id:
:paramtype client_id: str
:keyword client_secret_url:
:paramtype client_secret_url: str
:keyword user_assigned_identities: This is a dictionary.
:paramtype user_assigned_identities: dict[str, ~flow.models.ComputeRPUserAssignedIdentity]
"""
super(ComputeIdentityContract, self).__init__(**kwargs)
self.type = type
self.system_identity_url = system_identity_url
self.principal_id = principal_id
self.tenant_id = tenant_id
self.client_id = client_id
self.client_secret_url = client_secret_url
self.user_assigned_identities = user_assigned_identities
class ComputeIdentityDto(msrest.serialization.Model):
"""ComputeIdentityDto.
:ivar compute_name:
:vartype compute_name: str
:ivar compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
"ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
"Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
:vartype compute_target_type: str or ~flow.models.ComputeTargetType
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'compute_name': {'key': 'computeName', 'type': 'str'},
'compute_target_type': {'key': 'computeTargetType', 'type': 'str'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
*,
compute_name: Optional[str] = None,
compute_target_type: Optional[Union[str, "ComputeTargetType"]] = None,
intellectual_property_publisher: Optional[str] = None,
**kwargs
):
"""
:keyword compute_name:
:paramtype compute_name: str
:keyword compute_target_type: Possible values include: "Local", "Remote", "HdiCluster",
"ContainerInstance", "AmlCompute", "ComputeInstance", "Cmk8s", "SynapseSpark", "Kubernetes",
"Aisc", "GlobalJobDispatcher", "Databricks", "MockedCompute".
:paramtype compute_target_type: str or ~flow.models.ComputeTargetType
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(ComputeIdentityDto, self).__init__(**kwargs)
self.compute_name = compute_name
self.compute_target_type = compute_target_type
self.intellectual_property_publisher = intellectual_property_publisher
class ComputeInfo(msrest.serialization.Model):
"""ComputeInfo.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar is_ssl_enabled:
:vartype is_ssl_enabled: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar cluster_purpose:
:vartype cluster_purpose: str
:ivar public_ip_address:
:vartype public_ip_address: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
compute_type: Optional[Union[str, "ComputeEnvironmentType"]] = None,
is_ssl_enabled: Optional[bool] = None,
is_gpu_type: Optional[bool] = None,
cluster_purpose: Optional[str] = None,
public_ip_address: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword is_ssl_enabled:
:paramtype is_ssl_enabled: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword cluster_purpose:
:paramtype cluster_purpose: str
:keyword public_ip_address:
:paramtype public_ip_address: str
"""
super(ComputeInfo, self).__init__(**kwargs)
self.name = name
self.compute_type = compute_type
self.is_ssl_enabled = is_ssl_enabled
self.is_gpu_type = is_gpu_type
self.cluster_purpose = cluster_purpose
self.public_ip_address = public_ip_address
class ComputeProperties(msrest.serialization.Model):
"""ComputeProperties.
All required parameters must be populated in order to send to Azure.
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar disable_local_auth:
:vartype disable_local_auth: bool
:ivar description:
:vartype description: str
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type: Required.
:vartype compute_type: str
:ivar compute_location:
:vartype compute_location: str
:ivar provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
"Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
:vartype provisioning_state: str or ~flow.models.ProvisioningState
:ivar provisioning_errors:
:vartype provisioning_errors: list[~flow.models.ODataErrorResponse]
:ivar provisioning_warnings: This is a dictionary.
:vartype provisioning_warnings: dict[str, str]
:ivar is_attached_compute:
:vartype is_attached_compute: bool
:ivar properties: Any object.
:vartype properties: any
:ivar status:
:vartype status: ~flow.models.ComputeStatus
:ivar warnings:
:vartype warnings: list[~flow.models.ComputeWarning]
"""
_validation = {
'compute_type': {'required': True, 'min_length': 1},
}
_attribute_map = {
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'compute_location': {'key': 'computeLocation', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ODataErrorResponse]'},
'provisioning_warnings': {'key': 'provisioningWarnings', 'type': '{str}'},
'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
'properties': {'key': 'properties', 'type': 'object'},
'status': {'key': 'status', 'type': 'ComputeStatus'},
'warnings': {'key': 'warnings', 'type': '[ComputeWarning]'},
}
def __init__(
self,
*,
compute_type: str,
created_on: Optional[datetime.datetime] = None,
modified_on: Optional[datetime.datetime] = None,
disable_local_auth: Optional[bool] = None,
description: Optional[str] = None,
resource_id: Optional[str] = None,
compute_location: Optional[str] = None,
provisioning_state: Optional[Union[str, "ProvisioningState"]] = None,
provisioning_errors: Optional[List["ODataErrorResponse"]] = None,
provisioning_warnings: Optional[Dict[str, str]] = None,
is_attached_compute: Optional[bool] = None,
properties: Optional[Any] = None,
status: Optional["ComputeStatus"] = None,
warnings: Optional[List["ComputeWarning"]] = None,
**kwargs
):
"""
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword disable_local_auth:
:paramtype disable_local_auth: bool
:keyword description:
:paramtype description: str
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type: Required.
:paramtype compute_type: str
:keyword compute_location:
:paramtype compute_location: str
:keyword provisioning_state: Possible values include: "Unknown", "Updating", "Creating",
"Deleting", "Accepted", "Succeeded", "Failed", "Canceled".
:paramtype provisioning_state: str or ~flow.models.ProvisioningState
:keyword provisioning_errors:
:paramtype provisioning_errors: list[~flow.models.ODataErrorResponse]
:keyword provisioning_warnings: This is a dictionary.
:paramtype provisioning_warnings: dict[str, str]
:keyword is_attached_compute:
:paramtype is_attached_compute: bool
:keyword properties: Any object.
:paramtype properties: any
:keyword status:
:paramtype status: ~flow.models.ComputeStatus
:keyword warnings:
:paramtype warnings: list[~flow.models.ComputeWarning]
"""
super(ComputeProperties, self).__init__(**kwargs)
self.created_on = created_on
self.modified_on = modified_on
self.disable_local_auth = disable_local_auth
self.description = description
self.resource_id = resource_id
self.compute_type = compute_type
self.compute_location = compute_location
self.provisioning_state = provisioning_state
self.provisioning_errors = provisioning_errors
self.provisioning_warnings = provisioning_warnings
self.is_attached_compute = is_attached_compute
self.properties = properties
self.status = status
self.warnings = warnings
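# Usage sketch (illustrative only; the compute_type value is hypothetical).
# compute_type is the sole required field (see _validation); because __init__
# declares it as a mandatory keyword argument, omitting it raises a TypeError
# at construction time rather than at serialization time:
#
#     props = ComputeProperties(compute_type="AmlCompute", disable_local_auth=True)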
class ComputeRequest(msrest.serialization.Model):
"""ComputeRequest.
:ivar node_count:
:vartype node_count: int
:ivar gpu_count:
:vartype gpu_count: int
"""
_attribute_map = {
'node_count': {'key': 'nodeCount', 'type': 'int'},
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
}
def __init__(
self,
*,
node_count: Optional[int] = None,
gpu_count: Optional[int] = None,
**kwargs
):
"""
:keyword node_count:
:paramtype node_count: int
:keyword gpu_count:
:paramtype gpu_count: int
"""
super(ComputeRequest, self).__init__(**kwargs)
self.node_count = node_count
self.gpu_count = gpu_count
class ComputeRPUserAssignedIdentity(msrest.serialization.Model):
"""ComputeRPUserAssignedIdentity.
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar client_id:
:vartype client_id: str
:ivar client_secret_url:
:vartype client_secret_url: str
:ivar resource_id:
:vartype resource_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'client_secret_url': {'key': 'clientSecretUrl', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
}
def __init__(
self,
*,
principal_id: Optional[str] = None,
tenant_id: Optional[str] = None,
client_id: Optional[str] = None,
client_secret_url: Optional[str] = None,
resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword client_id:
:paramtype client_id: str
:keyword client_secret_url:
:paramtype client_secret_url: str
:keyword resource_id:
:paramtype resource_id: str
"""
super(ComputeRPUserAssignedIdentity, self).__init__(**kwargs)
self.principal_id = principal_id
self.tenant_id = tenant_id
self.client_id = client_id
self.client_secret_url = client_secret_url
self.resource_id = resource_id
class ComputeSetting(msrest.serialization.Model):
"""ComputeSetting.
:ivar name:
:vartype name: str
:ivar compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:vartype compute_type: str or ~flow.models.ComputeType
:ivar batch_ai_compute_info:
:vartype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
:ivar remote_docker_compute_info:
:vartype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
:ivar hdi_cluster_compute_info:
:vartype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
:ivar mlc_compute_info:
:vartype mlc_compute_info: ~flow.models.MlcComputeInfo
:ivar databricks_compute_info:
:vartype databricks_compute_info: ~flow.models.DatabricksComputeInfo
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'batch_ai_compute_info': {'key': 'batchAiComputeInfo', 'type': 'BatchAiComputeInfo'},
'remote_docker_compute_info': {'key': 'remoteDockerComputeInfo', 'type': 'RemoteDockerComputeInfo'},
'hdi_cluster_compute_info': {'key': 'hdiClusterComputeInfo', 'type': 'HdiClusterComputeInfo'},
'mlc_compute_info': {'key': 'mlcComputeInfo', 'type': 'MlcComputeInfo'},
'databricks_compute_info': {'key': 'databricksComputeInfo', 'type': 'DatabricksComputeInfo'},
}
def __init__(
self,
*,
name: Optional[str] = None,
compute_type: Optional[Union[str, "ComputeType"]] = None,
batch_ai_compute_info: Optional["BatchAiComputeInfo"] = None,
remote_docker_compute_info: Optional["RemoteDockerComputeInfo"] = None,
hdi_cluster_compute_info: Optional["HdiClusterComputeInfo"] = None,
mlc_compute_info: Optional["MlcComputeInfo"] = None,
databricks_compute_info: Optional["DatabricksComputeInfo"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_type: Possible values include: "BatchAi", "MLC", "HdiCluster", "RemoteDocker",
"Databricks", "Aisc".
:paramtype compute_type: str or ~flow.models.ComputeType
:keyword batch_ai_compute_info:
:paramtype batch_ai_compute_info: ~flow.models.BatchAiComputeInfo
:keyword remote_docker_compute_info:
:paramtype remote_docker_compute_info: ~flow.models.RemoteDockerComputeInfo
:keyword hdi_cluster_compute_info:
:paramtype hdi_cluster_compute_info: ~flow.models.HdiClusterComputeInfo
:keyword mlc_compute_info:
:paramtype mlc_compute_info: ~flow.models.MlcComputeInfo
:keyword databricks_compute_info:
:paramtype databricks_compute_info: ~flow.models.DatabricksComputeInfo
"""
super(ComputeSetting, self).__init__(**kwargs)
self.name = name
self.compute_type = compute_type
self.batch_ai_compute_info = batch_ai_compute_info
self.remote_docker_compute_info = remote_docker_compute_info
self.hdi_cluster_compute_info = hdi_cluster_compute_info
self.mlc_compute_info = mlc_compute_info
self.databricks_compute_info = databricks_compute_info
class ComputeStatus(msrest.serialization.Model):
"""ComputeStatus.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar is_status_available:
:vartype is_status_available: bool
:ivar detailed_status: Anything.
:vartype detailed_status: any
:ivar error: Represents OData v4 error object.
:vartype error: ~flow.models.ODataError
"""
_validation = {
'is_status_available': {'readonly': True},
}
_attribute_map = {
'is_status_available': {'key': 'isStatusAvailable', 'type': 'bool'},
'detailed_status': {'key': 'detailedStatus', 'type': 'object'},
'error': {'key': 'error', 'type': 'ODataError'},
}
def __init__(
self,
*,
detailed_status: Optional[Any] = None,
error: Optional["ODataError"] = None,
**kwargs
):
"""
:keyword detailed_status: Anything.
:paramtype detailed_status: any
:keyword error: Represents OData v4 error object.
:paramtype error: ~flow.models.ODataError
"""
super(ComputeStatus, self).__init__(**kwargs)
self.is_status_available = None
self.detailed_status = detailed_status
self.error = error
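

# Illustrative sketch (not part of the generated client): is_status_available
# is marked readonly, so it stays None on client-constructed instances and is
# only populated when a service response is deserialized. msrest's
# Model.serialize() omits readonly fields by default.
def _example_compute_status_roundtrip() -> dict:
    """Serialize a client-built ComputeStatus; the readonly flag is dropped."""
    status = ComputeStatus(detailed_status={"phase": "Starting"})  # hypothetical payload
    assert status.is_status_available is None  # readonly, never set locally
    return status.serialize()  # e.g. {'detailedStatus': {'phase': 'Starting'}}
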
class ComputeStatusDetail(msrest.serialization.Model):
"""ComputeStatusDetail.
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar provisioning_error_message:
:vartype provisioning_error_message: str
"""
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'provisioning_error_message': {'key': 'provisioningErrorMessage', 'type': 'str'},
}
def __init__(
self,
*,
provisioning_state: Optional[str] = None,
provisioning_error_message: Optional[str] = None,
**kwargs
):
"""
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword provisioning_error_message:
:paramtype provisioning_error_message: str
"""
super(ComputeStatusDetail, self).__init__(**kwargs)
self.provisioning_state = provisioning_state
self.provisioning_error_message = provisioning_error_message


class ComputeWarning(msrest.serialization.Model):
"""ComputeWarning.
:ivar title:
:vartype title: str
:ivar message:
:vartype message: str
:ivar code:
:vartype code: str
:ivar severity: Possible values include: "Critical", "Error", "Warning", "Info".
:vartype severity: str or ~flow.models.SeverityLevel
"""
_attribute_map = {
'title': {'key': 'title', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
}
def __init__(
self,
*,
title: Optional[str] = None,
message: Optional[str] = None,
code: Optional[str] = None,
severity: Optional[Union[str, "SeverityLevel"]] = None,
**kwargs
):
"""
:keyword title:
:paramtype title: str
:keyword message:
:paramtype message: str
:keyword code:
:paramtype code: str
:keyword severity: Possible values include: "Critical", "Error", "Warning", "Info".
:paramtype severity: str or ~flow.models.SeverityLevel
"""
super(ComputeWarning, self).__init__(**kwargs)
self.title = title
self.message = message
self.code = code
self.severity = severity


class ConnectionConfigSpec(msrest.serialization.Model):
"""ConnectionConfigSpec.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar config_value_type: Possible values include: "String", "Secret".
:vartype config_value_type: str or ~flow.models.ConfigValueType
:ivar description:
:vartype description: str
:ivar default_value:
:vartype default_value: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'config_value_type': {'key': 'configValueType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
config_value_type: Optional[Union[str, "ConfigValueType"]] = None,
description: Optional[str] = None,
default_value: Optional[str] = None,
enum_values: Optional[List[str]] = None,
is_optional: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword config_value_type: Possible values include: "String", "Secret".
:paramtype config_value_type: str or ~flow.models.ConfigValueType
:keyword description:
:paramtype description: str
:keyword default_value:
:paramtype default_value: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword is_optional:
:paramtype is_optional: bool
"""
super(ConnectionConfigSpec, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
self.config_value_type = config_value_type
self.description = description
self.default_value = default_value
self.enum_values = enum_values
self.is_optional = is_optional


class ConnectionDto(msrest.serialization.Model):
"""ConnectionDto.
:ivar connection_name:
:vartype connection_name: str
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'connection_name': {'key': 'connectionName', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
connection_name: Optional[str] = None,
connection_type: Optional[Union[str, "ConnectionType"]] = None,
configs: Optional[Dict[str, str]] = None,
custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None,
expiry_time: Optional[datetime.datetime] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword connection_name:
:paramtype connection_name: str
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ConnectionDto, self).__init__(**kwargs)
self.connection_name = connection_name
self.connection_type = connection_type
self.configs = configs
self.custom_configs = custom_configs
self.expiry_time = expiry_time
self.owner = owner
self.created_date = created_date
self.last_modified_date = last_modified_date
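

# Illustrative sketch (not part of the generated client): ConnectionDto mixes
# plain string configs with typed CustomConnectionConfig entries. The
# connection name, endpoint, and expiry below are hypothetical.
def _example_connection_dto() -> "ConnectionDto":
    """Return a sample AzureOpenAI ConnectionDto."""
    return ConnectionDto(
        connection_name="my-aoai-connection",  # hypothetical
        connection_type="AzureOpenAI",  # documented ConnectionType value
        configs={"api_base": "https://example.openai.azure.com/"},  # hypothetical
        expiry_time=datetime.datetime(2024, 1, 1),  # hypothetical
    )
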
class ConnectionEntity(msrest.serialization.Model):
"""ConnectionEntity.
:ivar connection_id:
:vartype connection_id: str
:ivar connection_name:
:vartype connection_name: str
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_scope: Possible values include: "User", "WorkspaceShared".
:vartype connection_scope: str or ~flow.models.ConnectionScope
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
:ivar secret_name:
:vartype secret_name: str
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'connection_id': {'key': 'connectionId', 'type': 'str'},
'connection_name': {'key': 'connectionName', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_scope': {'key': 'connectionScope', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'secret_name': {'key': 'secretName', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
connection_id: Optional[str] = None,
connection_name: Optional[str] = None,
connection_type: Optional[Union[str, "ConnectionType"]] = None,
connection_scope: Optional[Union[str, "ConnectionScope"]] = None,
configs: Optional[Dict[str, str]] = None,
custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None,
expiry_time: Optional[datetime.datetime] = None,
secret_name: Optional[str] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword connection_id:
:paramtype connection_id: str
:keyword connection_name:
:paramtype connection_name: str
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_scope: Possible values include: "User", "WorkspaceShared".
:paramtype connection_scope: str or ~flow.models.ConnectionScope
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
:keyword secret_name:
:paramtype secret_name: str
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ConnectionEntity, self).__init__(**kwargs)
self.connection_id = connection_id
self.connection_name = connection_name
self.connection_type = connection_type
self.connection_scope = connection_scope
self.configs = configs
self.custom_configs = custom_configs
self.expiry_time = expiry_time
self.secret_name = secret_name
self.owner = owner
self.created_date = created_date
self.last_modified_date = last_modified_date


class ConnectionOverrideSetting(msrest.serialization.Model):
"""ConnectionOverrideSetting.
:ivar connection_source_type: Possible values include: "Node", "NodeInput".
:vartype connection_source_type: str or ~flow.models.ConnectionSourceType
:ivar node_name:
:vartype node_name: str
:ivar node_input_name:
:vartype node_input_name: str
:ivar node_deployment_name_input:
:vartype node_deployment_name_input: str
:ivar node_model_input:
:vartype node_model_input: str
:ivar connection_name:
:vartype connection_name: str
:ivar deployment_name:
:vartype deployment_name: str
:ivar model:
:vartype model: str
:ivar connection_types:
:vartype connection_types: list[str or ~flow.models.ConnectionType]
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar model_enum:
:vartype model_enum: list[str]
"""
_attribute_map = {
'connection_source_type': {'key': 'connectionSourceType', 'type': 'str'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'node_input_name': {'key': 'nodeInputName', 'type': 'str'},
'node_deployment_name_input': {'key': 'nodeDeploymentNameInput', 'type': 'str'},
'node_model_input': {'key': 'nodeModelInput', 'type': 'str'},
'connection_name': {'key': 'connectionName', 'type': 'str'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'model': {'key': 'model', 'type': 'str'},
'connection_types': {'key': 'connectionTypes', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'model_enum': {'key': 'modelEnum', 'type': '[str]'},
}
def __init__(
self,
*,
connection_source_type: Optional[Union[str, "ConnectionSourceType"]] = None,
node_name: Optional[str] = None,
node_input_name: Optional[str] = None,
node_deployment_name_input: Optional[str] = None,
node_model_input: Optional[str] = None,
connection_name: Optional[str] = None,
deployment_name: Optional[str] = None,
model: Optional[str] = None,
connection_types: Optional[List[Union[str, "ConnectionType"]]] = None,
capabilities: Optional["AzureOpenAIModelCapabilities"] = None,
model_enum: Optional[List[str]] = None,
**kwargs
):
"""
:keyword connection_source_type: Possible values include: "Node", "NodeInput".
:paramtype connection_source_type: str or ~flow.models.ConnectionSourceType
:keyword node_name:
:paramtype node_name: str
:keyword node_input_name:
:paramtype node_input_name: str
:keyword node_deployment_name_input:
:paramtype node_deployment_name_input: str
:keyword node_model_input:
:paramtype node_model_input: str
:keyword connection_name:
:paramtype connection_name: str
:keyword deployment_name:
:paramtype deployment_name: str
:keyword model:
:paramtype model: str
:keyword connection_types:
:paramtype connection_types: list[str or ~flow.models.ConnectionType]
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword model_enum:
:paramtype model_enum: list[str]
"""
super(ConnectionOverrideSetting, self).__init__(**kwargs)
self.connection_source_type = connection_source_type
self.node_name = node_name
self.node_input_name = node_input_name
self.node_deployment_name_input = node_deployment_name_input
self.node_model_input = node_model_input
self.connection_name = connection_name
self.deployment_name = deployment_name
self.model = model
self.connection_types = connection_types
self.capabilities = capabilities
self.model_enum = model_enum
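

# Illustrative sketch (not part of the generated client): a NodeInput-scoped
# override that pins a specific connection and deployment onto one node input.
# Node, input, connection, and deployment names are hypothetical.
def _example_connection_override_setting() -> "ConnectionOverrideSetting":
    """Return a sample override targeting a single node input."""
    return ConnectionOverrideSetting(
        connection_source_type="NodeInput",  # documented ConnectionSourceType value
        node_name="chat",  # hypothetical node
        node_input_name="connection",  # hypothetical input
        connection_name="my-aoai-connection",  # hypothetical connection
        deployment_name="gpt-35-turbo",  # hypothetical deployment
    )
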
class ConnectionSpec(msrest.serialization.Model):
"""ConnectionSpec.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar config_specs:
:vartype config_specs: list[~flow.models.ConnectionConfigSpec]
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
}
def __init__(
self,
*,
connection_type: Optional[Union[str, "ConnectionType"]] = None,
config_specs: Optional[List["ConnectionConfigSpec"]] = None,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword config_specs:
:paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
"""
super(ConnectionSpec, self).__init__(**kwargs)
self.connection_type = connection_type
self.config_specs = config_specs
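

# Illustrative sketch (not part of the generated client): a ConnectionSpec
# describes which config fields a connection type expects, each field being a
# ConnectionConfigSpec defined above. The field names below are hypothetical.
def _example_connection_spec() -> "ConnectionSpec":
    """Return a sample spec with one plain and one secret config field."""
    return ConnectionSpec(
        connection_type="Custom",  # documented ConnectionType value
        config_specs=[
            ConnectionConfigSpec(name="endpoint", config_value_type="String"),
            ConnectionConfigSpec(name="api_key", config_value_type="Secret"),
        ],
    )
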
class ContainerInstanceConfiguration(msrest.serialization.Model):
"""ContainerInstanceConfiguration.
:ivar region:
:vartype region: str
:ivar cpu_cores:
:vartype cpu_cores: float
:ivar memory_gb:
:vartype memory_gb: float
"""
_attribute_map = {
'region': {'key': 'region', 'type': 'str'},
'cpu_cores': {'key': 'cpuCores', 'type': 'float'},
'memory_gb': {'key': 'memoryGb', 'type': 'float'},
}
def __init__(
self,
*,
region: Optional[str] = None,
cpu_cores: Optional[float] = None,
memory_gb: Optional[float] = None,
**kwargs
):
"""
:keyword region:
:paramtype region: str
:keyword cpu_cores:
:paramtype cpu_cores: float
:keyword memory_gb:
:paramtype memory_gb: float
"""
super(ContainerInstanceConfiguration, self).__init__(**kwargs)
self.region = region
self.cpu_cores = cpu_cores
self.memory_gb = memory_gb


class ContainerRegistry(msrest.serialization.Model):
"""ContainerRegistry.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar credential_type:
:vartype credential_type: str
:ivar registry_identity:
:vartype registry_identity: ~flow.models.RegistryIdentity
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'credential_type': {'key': 'credentialType', 'type': 'str'},
'registry_identity': {'key': 'registryIdentity', 'type': 'RegistryIdentity'},
}
def __init__(
self,
*,
address: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
credential_type: Optional[str] = None,
registry_identity: Optional["RegistryIdentity"] = None,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword credential_type:
:paramtype credential_type: str
:keyword registry_identity:
:paramtype registry_identity: ~flow.models.RegistryIdentity
"""
super(ContainerRegistry, self).__init__(**kwargs)
self.address = address
self.username = username
self.password = password
self.credential_type = credential_type
self.registry_identity = registry_identity


class ContainerResourceRequirements(msrest.serialization.Model):
"""ContainerResourceRequirements.
:ivar cpu:
:vartype cpu: float
:ivar cpu_limit:
:vartype cpu_limit: float
:ivar memory_in_gb:
:vartype memory_in_gb: float
:ivar memory_in_gb_limit:
:vartype memory_in_gb_limit: float
:ivar gpu_enabled:
:vartype gpu_enabled: bool
:ivar gpu:
:vartype gpu: int
:ivar fpga:
:vartype fpga: int
"""
_attribute_map = {
'cpu': {'key': 'cpu', 'type': 'float'},
'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
'gpu_enabled': {'key': 'gpuEnabled', 'type': 'bool'},
'gpu': {'key': 'gpu', 'type': 'int'},
'fpga': {'key': 'fpga', 'type': 'int'},
}
def __init__(
self,
*,
cpu: Optional[float] = None,
cpu_limit: Optional[float] = None,
memory_in_gb: Optional[float] = None,
memory_in_gb_limit: Optional[float] = None,
gpu_enabled: Optional[bool] = None,
gpu: Optional[int] = None,
fpga: Optional[int] = None,
**kwargs
):
"""
:keyword cpu:
:paramtype cpu: float
:keyword cpu_limit:
:paramtype cpu_limit: float
:keyword memory_in_gb:
:paramtype memory_in_gb: float
:keyword memory_in_gb_limit:
:paramtype memory_in_gb_limit: float
:keyword gpu_enabled:
:paramtype gpu_enabled: bool
:keyword gpu:
:paramtype gpu: int
:keyword fpga:
:paramtype fpga: int
"""
super(ContainerResourceRequirements, self).__init__(**kwargs)
self.cpu = cpu
self.cpu_limit = cpu_limit
self.memory_in_gb = memory_in_gb
self.memory_in_gb_limit = memory_in_gb_limit
self.gpu_enabled = gpu_enabled
self.gpu = gpu
self.fpga = fpga
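

# Illustrative sketch (not part of the generated client): judging by the field
# names, cpu/memory_in_gb express the requested resources while
# cpu_limit/memory_in_gb_limit cap the container. Values below are arbitrary.
def _example_container_resource_requirements() -> "ContainerResourceRequirements":
    """Return sample requirements asking for 1 CPU / 2 GB, capped at 2 CPU / 4 GB."""
    return ContainerResourceRequirements(
        cpu=1.0,
        cpu_limit=2.0,
        memory_in_gb=2.0,
        memory_in_gb_limit=4.0,
        gpu_enabled=False,
    )
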
class ControlInput(msrest.serialization.Model):
"""ControlInput.
:ivar name:
:vartype name: str
:ivar default_value: Possible values include: "None", "False", "True", "Skipped".
:vartype default_value: str or ~flow.models.ControlInputValue
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
default_value: Optional[Union[str, "ControlInputValue"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword default_value: Possible values include: "None", "False", "True", "Skipped".
:paramtype default_value: str or ~flow.models.ControlInputValue
"""
super(ControlInput, self).__init__(**kwargs)
self.name = name
self.default_value = default_value


class ControlOutput(msrest.serialization.Model):
"""ControlOutput.
:ivar name:
:vartype name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
"""
super(ControlOutput, self).__init__(**kwargs)
self.name = name


class CopyDataTask(msrest.serialization.Model):
"""CopyDataTask.
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.DataCopyMode
"""
_attribute_map = {
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
*,
data_copy_mode: Optional[Union[str, "DataCopyMode"]] = None,
**kwargs
):
"""
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode
"""
super(CopyDataTask, self).__init__(**kwargs)
self.data_copy_mode = data_copy_mode
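

# Note: unlike the camelCase keys used elsewhere in this module, the wire key
# here is PascalCase ('DataCopyMode'), exactly as declared in _attribute_map.
# Illustrative sketch (not part of the generated client):
def _example_copy_data_task_payload() -> dict:
    """Serialize a sample CopyDataTask; shows the PascalCase wire key."""
    task = CopyDataTask(data_copy_mode="MergeWithOverwrite")  # documented enum value
    return task.serialize()  # e.g. {'DataCopyMode': 'MergeWithOverwrite'}
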
class CreatedBy(msrest.serialization.Model):
"""CreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
}
def __init__(
self,
*,
user_object_id: Optional[str] = None,
user_tenant_id: Optional[str] = None,
user_name: Optional[str] = None,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
"""
super(CreatedBy, self).__init__(**kwargs)
self.user_object_id = user_object_id
self.user_tenant_id = user_tenant_id
self.user_name = user_name


class CreatedFromDto(msrest.serialization.Model):
"""CreatedFromDto.
:ivar type: The only acceptable values to pass in are None and "Notebook". The default value
is None.
:vartype type: str
:ivar location_type: The only acceptable values to pass in are None and "ArtifactId". The
default value is None.
:vartype location_type: str
:ivar location:
:vartype location: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'location_type': {'key': 'locationType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[str] = None,
location_type: Optional[str] = None,
location: Optional[str] = None,
**kwargs
):
"""
:keyword type: The only acceptable values to pass in are None and "Notebook". The default
value is None.
:paramtype type: str
:keyword location_type: The only acceptable values to pass in are None and "ArtifactId". The
default value is None.
:paramtype location_type: str
:keyword location:
:paramtype location: str
"""
super(CreatedFromDto, self).__init__(**kwargs)
self.type = type
self.location_type = location_type
self.location = location
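

# Illustrative sketch (not part of the generated client): per the docstring,
# type and location_type each accept only None or a single sentinel string, so
# a populated instance is effectively fixed to "Notebook"/"ArtifactId".
def _example_created_from_dto() -> "CreatedFromDto":
    """Return a sample CreatedFromDto pointing at a notebook artifact."""
    return CreatedFromDto(
        type="Notebook",  # only non-None value accepted
        location_type="ArtifactId",  # only non-None value accepted
        location="artifact-id-123",  # hypothetical artifact id
    )
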
class CreateFlowFromSampleRequest(msrest.serialization.Model):
"""CreateFlowFromSampleRequest.
:ivar flow_name:
:vartype flow_name: str
:ivar sample_resource_id:
:vartype sample_resource_id: str
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar is_archived:
:vartype is_archived: bool
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
}
def __init__(
self,
*,
flow_name: Optional[str] = None,
sample_resource_id: Optional[str] = None,
flow_definition_file_path: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
is_archived: Optional[bool] = None,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword sample_resource_id:
:paramtype sample_resource_id: str
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword is_archived:
:paramtype is_archived: bool
"""
super(CreateFlowFromSampleRequest, self).__init__(**kwargs)
self.flow_name = flow_name
self.sample_resource_id = sample_resource_id
self.flow_definition_file_path = flow_definition_file_path
self.tags = tags
self.is_archived = is_archived


class CreateFlowRequest(msrest.serialization.Model):
"""CreateFlowRequest.
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
flow_name: Optional[str] = None,
description: Optional[str] = None,
details: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
flow: Optional["Flow"] = None,
flow_definition_file_path: Optional[str] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
flow_run_settings: Optional["FlowRunSettings"] = None,
is_archived: Optional[bool] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(CreateFlowRequest, self).__init__(**kwargs)
self.flow_name = flow_name
self.description = description
self.details = details
self.tags = tags
self.flow = flow
self.flow_definition_file_path = flow_definition_file_path
self.flow_type = flow_type
self.flow_run_settings = flow_run_settings
self.is_archived = is_archived
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.identity = identity
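

# Illustrative sketch (not part of the generated client): a minimal
# CreateFlowRequest carrying only flow metadata. The flow name, definition
# path, tags, and VM SKU below are hypothetical.
def _example_create_flow_request() -> "CreateFlowRequest":
    """Return a sample request for creating a chat flow."""
    return CreateFlowRequest(
        flow_name="my-chat-flow",  # hypothetical
        description="Demo flow",  # hypothetical
        flow_type="Chat",  # documented FlowType value
        flow_definition_file_path="flows/chat/flow.dag.yaml",  # hypothetical
        tags={"team": "demo"},  # hypothetical
        vm_size="Standard_DS2_v2",  # hypothetical SKU
        max_idle_time_seconds=3600,
    )
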
class CreateFlowRuntimeRequest(msrest.serialization.Model):
"""CreateFlowRuntimeRequest.
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar deployment_name:
:vartype deployment_name: str
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar environment:
:vartype environment: str
:ivar instance_count:
:vartype instance_count: int
"""
_attribute_map = {
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
}
def __init__(
self,
*,
runtime_type: Optional[Union[str, "RuntimeType"]] = None,
identity: Optional["ManagedServiceIdentity"] = None,
instance_type: Optional[str] = None,
from_existing_endpoint: Optional[bool] = None,
from_existing_deployment: Optional[bool] = None,
endpoint_name: Optional[str] = None,
deployment_name: Optional[str] = None,
compute_instance_name: Optional[str] = None,
from_existing_custom_app: Optional[bool] = None,
custom_app_name: Optional[str] = None,
runtime_description: Optional[str] = None,
environment: Optional[str] = None,
instance_count: Optional[int] = None,
**kwargs
):
"""
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword deployment_name:
:paramtype deployment_name: str
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword environment:
:paramtype environment: str
:keyword instance_count:
:paramtype instance_count: int
"""
super(CreateFlowRuntimeRequest, self).__init__(**kwargs)
self.runtime_type = runtime_type
self.identity = identity
self.instance_type = instance_type
self.from_existing_endpoint = from_existing_endpoint
self.from_existing_deployment = from_existing_deployment
self.endpoint_name = endpoint_name
self.deployment_name = deployment_name
self.compute_instance_name = compute_instance_name
self.from_existing_custom_app = from_existing_custom_app
self.custom_app_name = custom_app_name
self.runtime_description = runtime_description
self.environment = environment
self.instance_count = instance_count
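

# Illustrative sketch (not part of the generated client): the from_existing_*
# flags control whether the runtime reuses an existing endpoint, deployment,
# or custom app; here a fresh compute-instance runtime is requested. The
# instance and description values are hypothetical.
def _example_create_flow_runtime_request() -> "CreateFlowRuntimeRequest":
    """Return a sample request for a ComputeInstance-backed runtime."""
    return CreateFlowRuntimeRequest(
        runtime_type="ComputeInstance",  # documented RuntimeType value
        compute_instance_name="my-ci",  # hypothetical
        from_existing_custom_app=False,
        runtime_description="Demo runtime",  # hypothetical
    )
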
class CreateFlowSessionRequest(msrest.serialization.Model):
"""CreateFlowSessionRequest.
:ivar python_pip_requirements:
:vartype python_pip_requirements: list[str]
:ivar base_image:
:vartype base_image: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
:vartype action: str or ~flow.models.SetupFlowSessionAction
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
'base_image': {'key': 'baseImage', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'action': {'key': 'action', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
python_pip_requirements: Optional[List[str]] = None,
base_image: Optional[str] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
action: Optional[Union[str, "SetupFlowSessionAction"]] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword python_pip_requirements:
:paramtype python_pip_requirements: list[str]
:keyword base_image:
:paramtype base_image: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
:paramtype action: str or ~flow.models.SetupFlowSessionAction
:keyword identity:
:paramtype identity: str
"""
super(CreateFlowSessionRequest, self).__init__(**kwargs)
self.python_pip_requirements = python_pip_requirements
self.base_image = base_image
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.action = action
self.identity = identity
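

# Illustrative sketch (not part of the generated client): a session request
# that installs extra pip packages on a given base image. The package pin,
# image reference, and VM SKU are hypothetical.
def _example_create_flow_session_request() -> "CreateFlowSessionRequest":
    """Return a sample session setup request using the Install action."""
    return CreateFlowSessionRequest(
        python_pip_requirements=["pandas==2.0.0"],  # hypothetical pin
        base_image="mcr.microsoft.com/azureml/example:latest",  # hypothetical image
        vm_size="Standard_DS2_v2",  # hypothetical SKU
        max_idle_time_seconds=3600,
        action="Install",  # documented SetupFlowSessionAction value
    )
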
class CreateInferencePipelineRequest(msrest.serialization.Model):
"""CreateInferencePipelineRequest.
:ivar module_node_id:
:vartype module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar training_pipeline_draft_name:
:vartype training_pipeline_draft_name: str
:ivar training_pipeline_run_display_name:
:vartype training_pipeline_run_display_name: str
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'training_pipeline_draft_name': {'key': 'trainingPipelineDraftName', 'type': 'str'},
'training_pipeline_run_display_name': {'key': 'trainingPipelineRunDisplayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
*,
module_node_id: Optional[str] = None,
port_name: Optional[str] = None,
training_pipeline_draft_name: Optional[str] = None,
training_pipeline_run_display_name: Optional[str] = None,
name: Optional[str] = None,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
graph: Optional["GraphDraftEntity"] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
tags: Optional[Dict[str, str]] = None,
continue_run_on_step_failure: Optional[bool] = None,
description: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
enforce_rerun: Optional[bool] = None,
dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
**kwargs
):
"""
:keyword module_node_id:
:paramtype module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword training_pipeline_draft_name:
:paramtype training_pipeline_draft_name: str
:keyword training_pipeline_run_display_name:
:paramtype training_pipeline_run_display_name: str
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreateInferencePipelineRequest, self).__init__(**kwargs)
self.module_node_id = module_node_id
self.port_name = port_name
self.training_pipeline_draft_name = training_pipeline_draft_name
self.training_pipeline_run_display_name = training_pipeline_run_display_name
self.name = name
self.pipeline_type = pipeline_type
self.pipeline_draft_mode = pipeline_draft_mode
self.graph_components_mode = graph_components_mode
self.sub_pipelines_info = sub_pipelines_info
self.flattened_sub_graphs = flattened_sub_graphs
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
self.graph = graph
self.pipeline_run_settings = pipeline_run_settings
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.tags = tags
self.continue_run_on_step_failure = continue_run_on_step_failure
self.description = description
self.properties = properties
self.enforce_rerun = enforce_rerun
self.dataset_access_modes = dataset_access_modes


class CreateOrUpdateConnectionRequest(msrest.serialization.Model):
"""CreateOrUpdateConnectionRequest.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_scope: Possible values include: "User", "WorkspaceShared".
:vartype connection_scope: str or ~flow.models.ConnectionScope
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_scope': {'key': 'connectionScope', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
connection_type: Optional[Union[str, "ConnectionType"]] = None,
connection_scope: Optional[Union[str, "ConnectionScope"]] = None,
configs: Optional[Dict[str, str]] = None,
custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None,
expiry_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_scope: Possible values include: "User", "WorkspaceShared".
:paramtype connection_scope: str or ~flow.models.ConnectionScope
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
"""
super(CreateOrUpdateConnectionRequest, self).__init__(**kwargs)
self.connection_type = connection_type
self.connection_scope = connection_scope
self.configs = configs
self.custom_configs = custom_configs
self.expiry_time = expiry_time
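

# Illustrative sketch (not part of the generated client): plain settings go in
# configs, while custom_configs carries typed CustomConnectionConfig entries
# (defined elsewhere in this module). Names and values are hypothetical.
def _example_create_or_update_connection_request() -> "CreateOrUpdateConnectionRequest":
    """Return a sample workspace-shared AzureOpenAI connection request."""
    return CreateOrUpdateConnectionRequest(
        connection_type="AzureOpenAI",  # documented ConnectionType value
        connection_scope="WorkspaceShared",  # documented ConnectionScope value
        configs={"api_base": "https://example.openai.azure.com/"},  # hypothetical
    )
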
class CreateOrUpdateConnectionRequestDto(msrest.serialization.Model):
"""CreateOrUpdateConnectionRequestDto.
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar configs: This is a dictionary.
:vartype configs: dict[str, str]
:ivar custom_configs: This is a dictionary.
:vartype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:ivar expiry_time:
:vartype expiry_time: ~datetime.datetime
"""
_attribute_map = {
'connection_type': {'key': 'connectionType', 'type': 'str'},
'configs': {'key': 'configs', 'type': '{str}'},
'custom_configs': {'key': 'customConfigs', 'type': '{CustomConnectionConfig}'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
connection_type: Optional[Union[str, "ConnectionType"]] = None,
configs: Optional[Dict[str, str]] = None,
custom_configs: Optional[Dict[str, "CustomConnectionConfig"]] = None,
expiry_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword configs: This is a dictionary.
:paramtype configs: dict[str, str]
:keyword custom_configs: This is a dictionary.
:paramtype custom_configs: dict[str, ~flow.models.CustomConnectionConfig]
:keyword expiry_time:
:paramtype expiry_time: ~datetime.datetime
"""
super(CreateOrUpdateConnectionRequestDto, self).__init__(**kwargs)
self.connection_type = connection_type
self.configs = configs
self.custom_configs = custom_configs
self.expiry_time = expiry_time


class CreatePipelineDraftRequest(msrest.serialization.Model):
"""CreatePipelineDraftRequest.
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
graph: Optional["GraphDraftEntity"] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
tags: Optional[Dict[str, str]] = None,
continue_run_on_step_failure: Optional[bool] = None,
description: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
enforce_rerun: Optional[bool] = None,
dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreatePipelineDraftRequest, self).__init__(**kwargs)
self.name = name
self.pipeline_type = pipeline_type
self.pipeline_draft_mode = pipeline_draft_mode
self.graph_components_mode = graph_components_mode
self.sub_pipelines_info = sub_pipelines_info
self.flattened_sub_graphs = flattened_sub_graphs
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
self.graph = graph
self.pipeline_run_settings = pipeline_run_settings
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.tags = tags
self.continue_run_on_step_failure = continue_run_on_step_failure
self.description = description
self.properties = properties
self.enforce_rerun = enforce_rerun
self.dataset_access_modes = dataset_access_modes
class CreatePipelineJobScheduleDto(msrest.serialization.Model):
"""CreatePipelineJobScheduleDto.
:ivar name:
:vartype name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
name: Optional[str] = None,
pipeline_job_name: Optional[str] = None,
pipeline_job_runtime_settings: Optional["PipelineJobRuntimeBasicSettings"] = None,
display_name: Optional[str] = None,
trigger_type: Optional[Union[str, "TriggerType"]] = None,
recurrence: Optional["Recurrence"] = None,
cron: Optional["Cron"] = None,
status: Optional[Union[str, "ScheduleStatus"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(CreatePipelineJobScheduleDto, self).__init__(**kwargs)
self.name = name
self.pipeline_job_name = pipeline_job_name
self.pipeline_job_runtime_settings = pipeline_job_runtime_settings
self.display_name = display_name
self.trigger_type = trigger_type
self.recurrence = recurrence
self.cron = cron
self.status = status
self.description = description
self.tags = tags
self.properties = properties
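# Example (editor's sketch, not generated code): scheduling a pipeline job on a
# cron trigger. The names and cron expression below are illustrative assumptions.
#     schedule = CreatePipelineJobScheduleDto(
#         name="nightly-training",
#         pipeline_job_name="train-pipeline-job",
#         trigger_type="Cron",
#         cron=Cron(expression="0 2 * * *", time_zone="UTC"),
#         status="Enabled",
#     )
# A recurrence-based schedule would instead set trigger_type="Recurrence" and
# pass a Recurrence object in place of cron.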
class CreatePublishedPipelineRequest(msrest.serialization.Model):
"""CreatePublishedPipelineRequest.
:ivar use_pipeline_endpoint:
:vartype use_pipeline_endpoint: bool
:ivar pipeline_name:
:vartype pipeline_name: str
:ivar pipeline_description:
:vartype pipeline_description: str
:ivar use_existing_pipeline_endpoint:
:vartype use_existing_pipeline_endpoint: bool
:ivar pipeline_endpoint_name:
:vartype pipeline_endpoint_name: str
:ivar pipeline_endpoint_description:
:vartype pipeline_endpoint_description: str
:ivar set_as_default_pipeline_for_endpoint:
:vartype set_as_default_pipeline_for_endpoint: bool
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar enable_notification:
:vartype enable_notification: bool
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar display_name:
:vartype display_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'use_pipeline_endpoint': {'key': 'usePipelineEndpoint', 'type': 'bool'},
'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
'pipeline_description': {'key': 'pipelineDescription', 'type': 'str'},
'use_existing_pipeline_endpoint': {'key': 'useExistingPipelineEndpoint', 'type': 'bool'},
'pipeline_endpoint_name': {'key': 'pipelineEndpointName', 'type': 'str'},
'pipeline_endpoint_description': {'key': 'pipelineEndpointDescription', 'type': 'str'},
'set_as_default_pipeline_for_endpoint': {'key': 'setAsDefaultPipelineForEndpoint', 'type': 'bool'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
*,
use_pipeline_endpoint: Optional[bool] = None,
pipeline_name: Optional[str] = None,
pipeline_description: Optional[str] = None,
use_existing_pipeline_endpoint: Optional[bool] = None,
pipeline_endpoint_name: Optional[str] = None,
pipeline_endpoint_description: Optional[str] = None,
set_as_default_pipeline_for_endpoint: Optional[bool] = None,
step_tags: Optional[Dict[str, str]] = None,
experiment_name: Optional[str] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
enable_notification: Optional[bool] = None,
sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
display_name: Optional[str] = None,
run_id: Optional[str] = None,
parent_run_id: Optional[str] = None,
graph: Optional["GraphDraftEntity"] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
tags: Optional[Dict[str, str]] = None,
continue_run_on_step_failure: Optional[bool] = None,
description: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
enforce_rerun: Optional[bool] = None,
dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
**kwargs
):
"""
:keyword use_pipeline_endpoint:
:paramtype use_pipeline_endpoint: bool
:keyword pipeline_name:
:paramtype pipeline_name: str
:keyword pipeline_description:
:paramtype pipeline_description: str
:keyword use_existing_pipeline_endpoint:
:paramtype use_existing_pipeline_endpoint: bool
:keyword pipeline_endpoint_name:
:paramtype pipeline_endpoint_name: str
:keyword pipeline_endpoint_description:
:paramtype pipeline_endpoint_description: str
:keyword set_as_default_pipeline_for_endpoint:
:paramtype set_as_default_pipeline_for_endpoint: bool
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword enable_notification:
:paramtype enable_notification: bool
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword display_name:
:paramtype display_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(CreatePublishedPipelineRequest, self).__init__(**kwargs)
self.use_pipeline_endpoint = use_pipeline_endpoint
self.pipeline_name = pipeline_name
self.pipeline_description = pipeline_description
self.use_existing_pipeline_endpoint = use_existing_pipeline_endpoint
self.pipeline_endpoint_name = pipeline_endpoint_name
self.pipeline_endpoint_description = pipeline_endpoint_description
self.set_as_default_pipeline_for_endpoint = set_as_default_pipeline_for_endpoint
self.step_tags = step_tags
self.experiment_name = experiment_name
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
self.enable_notification = enable_notification
self.sub_pipelines_info = sub_pipelines_info
self.display_name = display_name
self.run_id = run_id
self.parent_run_id = parent_run_id
self.graph = graph
self.pipeline_run_settings = pipeline_run_settings
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.tags = tags
self.continue_run_on_step_failure = continue_run_on_step_failure
self.description = description
self.properties = properties
self.enforce_rerun = enforce_rerun
self.dataset_access_modes = dataset_access_modes
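# Example (editor's sketch, not generated code): publishing a run's graph behind
# a new pipeline endpoint. All literal values are placeholders, not service
# requirements.
#     publish_request = CreatePublishedPipelineRequest(
#         use_pipeline_endpoint=True,
#         pipeline_name="churn-scoring",
#         pipeline_endpoint_name="churn-scoring-endpoint",
#         set_as_default_pipeline_for_endpoint=True,
#         run_id="run-123",
#         experiment_name="churn-experiments",
#         pipeline_parameters={"learning_rate": "0.01"},
#     )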
class CreateRealTimeEndpointRequest(msrest.serialization.Model):
"""CreateRealTimeEndpointRequest.
:ivar name:
:vartype name: str
:ivar compute_info:
:vartype compute_info: ~flow.models.ComputeInfo
:ivar description:
:vartype description: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar aks_advance_settings:
:vartype aks_advance_settings: ~flow.models.AKSAdvanceSettings
:ivar aci_advance_settings:
:vartype aci_advance_settings: ~flow.models.ACIAdvanceSettings
:ivar linked_training_pipeline_run_id:
:vartype linked_training_pipeline_run_id: str
:ivar linked_experiment_name:
:vartype linked_experiment_name: str
:ivar graph_nodes_run_id_mapping: This is a dictionary.
:vartype graph_nodes_run_id_mapping: dict[str, str]
:ivar workflow:
:vartype workflow: ~flow.models.PipelineGraph
:ivar inputs:
:vartype inputs: list[~flow.models.InputOutputPortMetadata]
:ivar outputs:
:vartype outputs: list[~flow.models.InputOutputPortMetadata]
:ivar example_request:
:vartype example_request: ~flow.models.ExampleRequest
:ivar user_storage_connection_string:
:vartype user_storage_connection_string: str
:ivar user_storage_endpoint_uri:
:vartype user_storage_endpoint_uri: str
:ivar user_storage_workspace_sai_token:
:vartype user_storage_workspace_sai_token: str
:ivar user_storage_container_name:
:vartype user_storage_container_name: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar root_pipeline_run_id:
:vartype root_pipeline_run_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'compute_info': {'key': 'computeInfo', 'type': 'ComputeInfo'},
'description': {'key': 'description', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'aks_advance_settings': {'key': 'aksAdvanceSettings', 'type': 'AKSAdvanceSettings'},
'aci_advance_settings': {'key': 'aciAdvanceSettings', 'type': 'ACIAdvanceSettings'},
'linked_training_pipeline_run_id': {'key': 'linkedTrainingPipelineRunId', 'type': 'str'},
'linked_experiment_name': {'key': 'linkedExperimentName', 'type': 'str'},
'graph_nodes_run_id_mapping': {'key': 'graphNodesRunIdMapping', 'type': '{str}'},
'workflow': {'key': 'workflow', 'type': 'PipelineGraph'},
'inputs': {'key': 'inputs', 'type': '[InputOutputPortMetadata]'},
'outputs': {'key': 'outputs', 'type': '[InputOutputPortMetadata]'},
'example_request': {'key': 'exampleRequest', 'type': 'ExampleRequest'},
'user_storage_connection_string': {'key': 'userStorageConnectionString', 'type': 'str'},
'user_storage_endpoint_uri': {'key': 'userStorageEndpointUri', 'type': 'str'},
'user_storage_workspace_sai_token': {'key': 'userStorageWorkspaceSaiToken', 'type': 'str'},
'user_storage_container_name': {'key': 'userStorageContainerName', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
compute_info: Optional["ComputeInfo"] = None,
description: Optional[str] = None,
linked_pipeline_draft_id: Optional[str] = None,
linked_pipeline_run_id: Optional[str] = None,
aks_advance_settings: Optional["AKSAdvanceSettings"] = None,
aci_advance_settings: Optional["ACIAdvanceSettings"] = None,
linked_training_pipeline_run_id: Optional[str] = None,
linked_experiment_name: Optional[str] = None,
graph_nodes_run_id_mapping: Optional[Dict[str, str]] = None,
workflow: Optional["PipelineGraph"] = None,
inputs: Optional[List["InputOutputPortMetadata"]] = None,
outputs: Optional[List["InputOutputPortMetadata"]] = None,
example_request: Optional["ExampleRequest"] = None,
user_storage_connection_string: Optional[str] = None,
user_storage_endpoint_uri: Optional[str] = None,
user_storage_workspace_sai_token: Optional[str] = None,
user_storage_container_name: Optional[str] = None,
pipeline_run_id: Optional[str] = None,
root_pipeline_run_id: Optional[str] = None,
experiment_name: Optional[str] = None,
experiment_id: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword compute_info:
:paramtype compute_info: ~flow.models.ComputeInfo
:keyword description:
:paramtype description: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword aks_advance_settings:
:paramtype aks_advance_settings: ~flow.models.AKSAdvanceSettings
:keyword aci_advance_settings:
:paramtype aci_advance_settings: ~flow.models.ACIAdvanceSettings
:keyword linked_training_pipeline_run_id:
:paramtype linked_training_pipeline_run_id: str
:keyword linked_experiment_name:
:paramtype linked_experiment_name: str
:keyword graph_nodes_run_id_mapping: This is a dictionary.
:paramtype graph_nodes_run_id_mapping: dict[str, str]
:keyword workflow:
:paramtype workflow: ~flow.models.PipelineGraph
:keyword inputs:
:paramtype inputs: list[~flow.models.InputOutputPortMetadata]
:keyword outputs:
:paramtype outputs: list[~flow.models.InputOutputPortMetadata]
:keyword example_request:
:paramtype example_request: ~flow.models.ExampleRequest
:keyword user_storage_connection_string:
:paramtype user_storage_connection_string: str
:keyword user_storage_endpoint_uri:
:paramtype user_storage_endpoint_uri: str
:keyword user_storage_workspace_sai_token:
:paramtype user_storage_workspace_sai_token: str
:keyword user_storage_container_name:
:paramtype user_storage_container_name: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword root_pipeline_run_id:
:paramtype root_pipeline_run_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(CreateRealTimeEndpointRequest, self).__init__(**kwargs)
self.name = name
self.compute_info = compute_info
self.description = description
self.linked_pipeline_draft_id = linked_pipeline_draft_id
self.linked_pipeline_run_id = linked_pipeline_run_id
self.aks_advance_settings = aks_advance_settings
self.aci_advance_settings = aci_advance_settings
self.linked_training_pipeline_run_id = linked_training_pipeline_run_id
self.linked_experiment_name = linked_experiment_name
self.graph_nodes_run_id_mapping = graph_nodes_run_id_mapping
self.workflow = workflow
self.inputs = inputs
self.outputs = outputs
self.example_request = example_request
self.user_storage_connection_string = user_storage_connection_string
self.user_storage_endpoint_uri = user_storage_endpoint_uri
self.user_storage_workspace_sai_token = user_storage_workspace_sai_token
self.user_storage_container_name = user_storage_container_name
self.pipeline_run_id = pipeline_run_id
self.root_pipeline_run_id = root_pipeline_run_id
self.experiment_name = experiment_name
self.experiment_id = experiment_id
class CreationContext(msrest.serialization.Model):
"""CreationContext.
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar creation_source:
:vartype creation_source: str
"""
_attribute_map = {
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'creation_source': {'key': 'creationSource', 'type': 'str'},
}
def __init__(
self,
*,
created_time: Optional[datetime.datetime] = None,
created_by: Optional["SchemaContractsCreatedBy"] = None,
creation_source: Optional[str] = None,
**kwargs
):
"""
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword creation_source:
:paramtype creation_source: str
"""
super(CreationContext, self).__init__(**kwargs)
self.created_time = created_time
self.created_by = created_by
self.creation_source = creation_source
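# Example (editor's sketch, not generated code): created_time is serialized as
# ISO-8601, so a timezone-aware datetime is the natural input. Values are
# illustrative assumptions.
#     import datetime
#     ctx = CreationContext(
#         created_time=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
#         creation_source="sdk",
#     )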
class Cron(msrest.serialization.Model):
"""Cron.
:ivar expression:
:vartype expression: str
:ivar end_time:
:vartype end_time: str
:ivar start_time:
:vartype start_time: str
:ivar time_zone:
:vartype time_zone: str
"""
_attribute_map = {
'expression': {'key': 'expression', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
}
def __init__(
self,
*,
expression: Optional[str] = None,
end_time: Optional[str] = None,
start_time: Optional[str] = None,
time_zone: Optional[str] = None,
**kwargs
):
"""
:keyword expression:
:paramtype expression: str
:keyword end_time:
:paramtype end_time: str
:keyword start_time:
:paramtype start_time: str
:keyword time_zone:
:paramtype time_zone: str
"""
super(Cron, self).__init__(**kwargs)
self.expression = expression
self.end_time = end_time
self.start_time = start_time
self.time_zone = time_zone
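# Example (editor's sketch, not generated code): all Cron fields are plain
# strings; the service, not this model, validates the expression. The schedule
# below ("every weekday at 09:00 UTC") is an illustrative assumption.
#     cron = Cron(
#         expression="0 9 * * 1-5",
#         start_time="2024-01-01T00:00:00Z",
#         time_zone="UTC",
#     )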
class CustomConnectionConfig(msrest.serialization.Model):
"""CustomConnectionConfig.
:ivar config_value_type: Possible values include: "String", "Secret".
:vartype config_value_type: str or ~flow.models.ConfigValueType
:ivar value:
:vartype value: str
"""
_attribute_map = {
'config_value_type': {'key': 'configValueType', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
config_value_type: Optional[Union[str, "ConfigValueType"]] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword config_value_type: Possible values include: "String", "Secret".
:paramtype config_value_type: str or ~flow.models.ConfigValueType
:keyword value:
:paramtype value: str
"""
super(CustomConnectionConfig, self).__init__(**kwargs)
self.config_value_type = config_value_type
self.value = value
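# Example (editor's sketch, not generated code): config_value_type distinguishes
# plain-text values from secrets the service should store securely. The keys and
# values below are assumptions for illustration.
#     api_key = CustomConnectionConfig(config_value_type="Secret", value="<api-key>")
#     api_base = CustomConnectionConfig(config_value_type="String", value="https://example.endpoint")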
class CustomReference(msrest.serialization.Model):
"""CustomReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
aml_data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(CustomReference, self).__init__(**kwargs)
self.aml_data_store_name = aml_data_store_name
self.relative_path = relative_path
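# Example (editor's sketch, not generated code): a custom reference addresses
# data by datastore name plus a path relative to that store. Names are
# placeholders.
#     ref = CustomReference(aml_data_store_name="workspaceblobstore", relative_path="raw/data.csv")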
class Data(msrest.serialization.Model):
"""Data.
:ivar data_location:
:vartype data_location: ~flow.models.ExecutionDataLocation
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DeliveryMechanism
:ivar environment_variable_name:
:vartype environment_variable_name: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar options: Dictionary of :code:`<string>`.
:vartype options: dict[str, str]
"""
_attribute_map = {
'data_location': {'key': 'dataLocation', 'type': 'ExecutionDataLocation'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'options': {'key': 'options', 'type': '{str}'},
}
def __init__(
self,
*,
data_location: Optional["ExecutionDataLocation"] = None,
mechanism: Optional[Union[str, "DeliveryMechanism"]] = None,
environment_variable_name: Optional[str] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
options: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword data_location:
:paramtype data_location: ~flow.models.ExecutionDataLocation
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DeliveryMechanism
:keyword environment_variable_name:
:paramtype environment_variable_name: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword options: Dictionary of :code:`<string>`.
:paramtype options: dict[str, str]
"""
super(Data, self).__init__(**kwargs)
self.data_location = data_location
self.mechanism = mechanism
self.environment_variable_name = environment_variable_name
self.path_on_compute = path_on_compute
self.overwrite = overwrite
self.options = options
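# Example (editor's sketch, not generated code): a Data entry that mounts its
# location on the compute target and exposes the mount point through an
# environment variable. The empty ExecutionDataLocation and the names are
# illustrative assumptions; populate the location per your source.
#     data = Data(
#         data_location=ExecutionDataLocation(),
#         mechanism="Mount",
#         environment_variable_name="INPUT_DATA",
#         overwrite=False,
#     )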
class DatabaseSink(msrest.serialization.Model):
"""DatabaseSink.
:ivar connection:
:vartype connection: str
:ivar table:
:vartype table: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'table': {'key': 'table', 'type': 'str'},
}
def __init__(
self,
*,
connection: Optional[str] = None,
table: Optional[str] = None,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword table:
:paramtype table: str
"""
super(DatabaseSink, self).__init__(**kwargs)
self.connection = connection
self.table = table
class DatabaseSource(msrest.serialization.Model):
"""DatabaseSource.
:ivar connection:
:vartype connection: str
:ivar query:
:vartype query: str
:ivar stored_procedure_name:
:vartype stored_procedure_name: str
:ivar stored_procedure_parameters:
:vartype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'query': {'key': 'query', 'type': 'str'},
'stored_procedure_name': {'key': 'storedProcedureName', 'type': 'str'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '[StoredProcedureParameter]'},
}
def __init__(
self,
*,
connection: Optional[str] = None,
query: Optional[str] = None,
stored_procedure_name: Optional[str] = None,
stored_procedure_parameters: Optional[List["StoredProcedureParameter"]] = None,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword query:
:paramtype query: str
:keyword stored_procedure_name:
:paramtype stored_procedure_name: str
:keyword stored_procedure_parameters:
:paramtype stored_procedure_parameters: list[~flow.models.StoredProcedureParameter]
"""
super(DatabaseSource, self).__init__(**kwargs)
self.connection = connection
self.query = query
self.stored_procedure_name = stored_procedure_name
self.stored_procedure_parameters = stored_procedure_parameters
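# Example (editor's sketch, not generated code): a database source is either an
# inline query or a stored procedure with parameters. The connection name and
# query are placeholders.
#     src = DatabaseSource(connection="my-sql-connection", query="SELECT * FROM telemetry")
# A stored-procedure variant would set stored_procedure_name and
# stored_procedure_parameters instead of query.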
class DatabricksComputeInfo(msrest.serialization.Model):
"""DatabricksComputeInfo.
:ivar existing_cluster_id:
:vartype existing_cluster_id: str
"""
_attribute_map = {
'existing_cluster_id': {'key': 'existingClusterId', 'type': 'str'},
}
def __init__(
self,
*,
existing_cluster_id: Optional[str] = None,
**kwargs
):
"""
:keyword existing_cluster_id:
:paramtype existing_cluster_id: str
"""
super(DatabricksComputeInfo, self).__init__(**kwargs)
self.existing_cluster_id = existing_cluster_id
class DatabricksConfiguration(msrest.serialization.Model):
"""DatabricksConfiguration.
:ivar workers:
:vartype workers: int
:ivar minimum_worker_count:
:vartype minimum_worker_count: int
    :ivar max_mum_worker_count: Maximum worker count; the name mirrors the service
     field ``maxMumWorkerCount``.
    :vartype max_mum_worker_count: int
:ivar spark_version:
:vartype spark_version: str
:ivar node_type_id:
:vartype node_type_id: str
:ivar spark_conf: Dictionary of :code:`<string>`.
:vartype spark_conf: dict[str, str]
:ivar spark_env_vars: Dictionary of :code:`<string>`.
:vartype spark_env_vars: dict[str, str]
:ivar cluster_log_conf_dbfs_path:
:vartype cluster_log_conf_dbfs_path: str
:ivar dbfs_init_scripts:
:vartype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
:ivar instance_pool_id:
:vartype instance_pool_id: str
:ivar timeout_seconds:
:vartype timeout_seconds: int
:ivar notebook_task:
:vartype notebook_task: ~flow.models.NoteBookTaskDto
:ivar spark_python_task:
:vartype spark_python_task: ~flow.models.SparkPythonTaskDto
:ivar spark_jar_task:
:vartype spark_jar_task: ~flow.models.SparkJarTaskDto
:ivar spark_submit_task:
:vartype spark_submit_task: ~flow.models.SparkSubmitTaskDto
:ivar jar_libraries:
:vartype jar_libraries: list[str]
:ivar egg_libraries:
:vartype egg_libraries: list[str]
:ivar whl_libraries:
:vartype whl_libraries: list[str]
:ivar pypi_libraries:
:vartype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:ivar r_cran_libraries:
:vartype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:ivar maven_libraries:
:vartype maven_libraries: list[~flow.models.MavenLibraryDto]
:ivar libraries:
:vartype libraries: list[any]
:ivar linked_adb_workspace_metadata:
:vartype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
:ivar databrick_resource_id:
:vartype databrick_resource_id: str
:ivar auto_scale:
:vartype auto_scale: bool
"""
_attribute_map = {
'workers': {'key': 'workers', 'type': 'int'},
'minimum_worker_count': {'key': 'minimumWorkerCount', 'type': 'int'},
'max_mum_worker_count': {'key': 'maxMumWorkerCount', 'type': 'int'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
'node_type_id': {'key': 'nodeTypeId', 'type': 'str'},
'spark_conf': {'key': 'sparkConf', 'type': '{str}'},
'spark_env_vars': {'key': 'sparkEnvVars', 'type': '{str}'},
'cluster_log_conf_dbfs_path': {'key': 'clusterLogConfDbfsPath', 'type': 'str'},
'dbfs_init_scripts': {'key': 'dbfsInitScripts', 'type': '[InitScriptInfoDto]'},
'instance_pool_id': {'key': 'instancePoolId', 'type': 'str'},
'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
'notebook_task': {'key': 'notebookTask', 'type': 'NoteBookTaskDto'},
'spark_python_task': {'key': 'sparkPythonTask', 'type': 'SparkPythonTaskDto'},
'spark_jar_task': {'key': 'sparkJarTask', 'type': 'SparkJarTaskDto'},
'spark_submit_task': {'key': 'sparkSubmitTask', 'type': 'SparkSubmitTaskDto'},
'jar_libraries': {'key': 'jarLibraries', 'type': '[str]'},
'egg_libraries': {'key': 'eggLibraries', 'type': '[str]'},
'whl_libraries': {'key': 'whlLibraries', 'type': '[str]'},
'pypi_libraries': {'key': 'pypiLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
'r_cran_libraries': {'key': 'rCranLibraries', 'type': '[PythonPyPiOrRCranLibraryDto]'},
'maven_libraries': {'key': 'mavenLibraries', 'type': '[MavenLibraryDto]'},
'libraries': {'key': 'libraries', 'type': '[object]'},
'linked_adb_workspace_metadata': {'key': 'linkedADBWorkspaceMetadata', 'type': 'LinkedADBWorkspaceMetadata'},
'databrick_resource_id': {'key': 'databrickResourceId', 'type': 'str'},
'auto_scale': {'key': 'autoScale', 'type': 'bool'},
}
def __init__(
self,
*,
workers: Optional[int] = None,
minimum_worker_count: Optional[int] = None,
max_mum_worker_count: Optional[int] = None,
spark_version: Optional[str] = None,
node_type_id: Optional[str] = None,
spark_conf: Optional[Dict[str, str]] = None,
spark_env_vars: Optional[Dict[str, str]] = None,
cluster_log_conf_dbfs_path: Optional[str] = None,
dbfs_init_scripts: Optional[List["InitScriptInfoDto"]] = None,
instance_pool_id: Optional[str] = None,
timeout_seconds: Optional[int] = None,
notebook_task: Optional["NoteBookTaskDto"] = None,
spark_python_task: Optional["SparkPythonTaskDto"] = None,
spark_jar_task: Optional["SparkJarTaskDto"] = None,
spark_submit_task: Optional["SparkSubmitTaskDto"] = None,
jar_libraries: Optional[List[str]] = None,
egg_libraries: Optional[List[str]] = None,
whl_libraries: Optional[List[str]] = None,
pypi_libraries: Optional[List["PythonPyPiOrRCranLibraryDto"]] = None,
r_cran_libraries: Optional[List["PythonPyPiOrRCranLibraryDto"]] = None,
maven_libraries: Optional[List["MavenLibraryDto"]] = None,
libraries: Optional[List[Any]] = None,
linked_adb_workspace_metadata: Optional["LinkedADBWorkspaceMetadata"] = None,
databrick_resource_id: Optional[str] = None,
auto_scale: Optional[bool] = None,
**kwargs
):
"""
:keyword workers:
:paramtype workers: int
:keyword minimum_worker_count:
:paramtype minimum_worker_count: int
    :keyword max_mum_worker_count: Maximum worker count; the name mirrors the
     service field ``maxMumWorkerCount``.
    :paramtype max_mum_worker_count: int
:keyword spark_version:
:paramtype spark_version: str
:keyword node_type_id:
:paramtype node_type_id: str
:keyword spark_conf: Dictionary of :code:`<string>`.
:paramtype spark_conf: dict[str, str]
:keyword spark_env_vars: Dictionary of :code:`<string>`.
:paramtype spark_env_vars: dict[str, str]
:keyword cluster_log_conf_dbfs_path:
:paramtype cluster_log_conf_dbfs_path: str
:keyword dbfs_init_scripts:
:paramtype dbfs_init_scripts: list[~flow.models.InitScriptInfoDto]
:keyword instance_pool_id:
:paramtype instance_pool_id: str
:keyword timeout_seconds:
:paramtype timeout_seconds: int
:keyword notebook_task:
:paramtype notebook_task: ~flow.models.NoteBookTaskDto
:keyword spark_python_task:
:paramtype spark_python_task: ~flow.models.SparkPythonTaskDto
:keyword spark_jar_task:
:paramtype spark_jar_task: ~flow.models.SparkJarTaskDto
:keyword spark_submit_task:
:paramtype spark_submit_task: ~flow.models.SparkSubmitTaskDto
:keyword jar_libraries:
:paramtype jar_libraries: list[str]
:keyword egg_libraries:
:paramtype egg_libraries: list[str]
:keyword whl_libraries:
:paramtype whl_libraries: list[str]
:keyword pypi_libraries:
:paramtype pypi_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:keyword r_cran_libraries:
:paramtype r_cran_libraries: list[~flow.models.PythonPyPiOrRCranLibraryDto]
:keyword maven_libraries:
:paramtype maven_libraries: list[~flow.models.MavenLibraryDto]
:keyword libraries:
:paramtype libraries: list[any]
:keyword linked_adb_workspace_metadata:
:paramtype linked_adb_workspace_metadata: ~flow.models.LinkedADBWorkspaceMetadata
:keyword databrick_resource_id:
:paramtype databrick_resource_id: str
:keyword auto_scale:
:paramtype auto_scale: bool
"""
super(DatabricksConfiguration, self).__init__(**kwargs)
self.workers = workers
self.minimum_worker_count = minimum_worker_count
self.max_mum_worker_count = max_mum_worker_count
self.spark_version = spark_version
self.node_type_id = node_type_id
self.spark_conf = spark_conf
self.spark_env_vars = spark_env_vars
self.cluster_log_conf_dbfs_path = cluster_log_conf_dbfs_path
self.dbfs_init_scripts = dbfs_init_scripts
self.instance_pool_id = instance_pool_id
self.timeout_seconds = timeout_seconds
self.notebook_task = notebook_task
self.spark_python_task = spark_python_task
self.spark_jar_task = spark_jar_task
self.spark_submit_task = spark_submit_task
self.jar_libraries = jar_libraries
self.egg_libraries = egg_libraries
self.whl_libraries = whl_libraries
self.pypi_libraries = pypi_libraries
self.r_cran_libraries = r_cran_libraries
self.maven_libraries = maven_libraries
self.libraries = libraries
self.linked_adb_workspace_metadata = linked_adb_workspace_metadata
self.databrick_resource_id = databrick_resource_id
self.auto_scale = auto_scale
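# Example (editor's sketch, not generated code): a minimal fixed-size Databricks
# cluster configuration. The Spark version and node type are assumptions; consult
# the service for currently supported values.
#     dbx = DatabricksConfiguration(
#         workers=2,
#         spark_version="11.3.x-scala2.12",
#         node_type_id="Standard_DS3_v2",
#         spark_conf={"spark.speculation": "true"},
#         timeout_seconds=3600,
#         auto_scale=False,
#     )
# For autoscaling, set auto_scale=True with minimum_worker_count and
# max_mum_worker_count instead of a fixed workers count.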
class DatacacheConfiguration(msrest.serialization.Model):
"""DatacacheConfiguration.
:ivar datacache_id:
:vartype datacache_id: str
:ivar datacache_store:
:vartype datacache_store: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar mode: The only acceptable values to pass in are None and "Mount". The default value is
None.
:vartype mode: str
:ivar replica:
:vartype replica: int
:ivar failure_fallback:
:vartype failure_fallback: bool
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'datacache_id': {'key': 'datacacheId', 'type': 'str'},
'datacache_store': {'key': 'datacacheStore', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'replica': {'key': 'replica', 'type': 'int'},
'failure_fallback': {'key': 'failureFallback', 'type': 'bool'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
*,
datacache_id: Optional[str] = None,
datacache_store: Optional[str] = None,
dataset_id: Optional[str] = None,
mode: Optional[str] = None,
replica: Optional[int] = None,
failure_fallback: Optional[bool] = None,
path_on_compute: Optional[str] = None,
**kwargs
):
"""
:keyword datacache_id:
:paramtype datacache_id: str
:keyword datacache_store:
:paramtype datacache_store: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword mode: The only acceptable values to pass in are None and "Mount". The default value
is None.
:paramtype mode: str
:keyword replica:
:paramtype replica: int
:keyword failure_fallback:
:paramtype failure_fallback: bool
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(DatacacheConfiguration, self).__init__(**kwargs)
self.datacache_id = datacache_id
self.datacache_store = datacache_store
self.dataset_id = dataset_id
self.mode = mode
self.replica = replica
self.failure_fallback = failure_fallback
self.path_on_compute = path_on_compute
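# Example (editor's sketch, not generated code): per the docstring above, mode
# may only be omitted (None) or set to "Mount". Identifiers are placeholders.
#     cache = DatacacheConfiguration(
#         datacache_store="my-datacache-store",
#         dataset_id="<dataset-guid>",
#         mode="Mount",
#         replica=2,
#         failure_fallback=True,
#     )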
class DataInfo(msrest.serialization.Model):
"""DataInfo.
:ivar feed_name:
:vartype feed_name: str
:ivar id:
:vartype id: str
:ivar data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
"GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
:vartype data_source_type: str or ~flow.models.DataSourceType
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar modified_date:
:vartype modified_date: ~datetime.datetime
:ivar registered_by:
:vartype registered_by: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:vartype data_reference_type: str or ~flow.models.DataReferenceType
:ivar dataset_type:
:vartype dataset_type: str
:ivar saved_dataset_id:
:vartype saved_dataset_id: str
:ivar dataset_version_id:
:vartype dataset_version_id: str
:ivar is_visible:
:vartype is_visible: bool
:ivar is_registered:
:vartype is_registered: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, any]
:ivar connection_string:
:vartype connection_string: str
:ivar container_name:
:vartype container_name: str
:ivar data_storage_endpoint_uri:
:vartype data_storage_endpoint_uri: str
:ivar workspace_sai_token:
:vartype workspace_sai_token: str
:ivar aml_dataset_data_flow:
:vartype aml_dataset_data_flow: str
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar arm_id:
:vartype arm_id: str
:ivar asset_id:
:vartype asset_id: str
:ivar asset_uri:
:vartype asset_uri: str
:ivar asset_type:
:vartype asset_type: str
:ivar is_data_v2:
:vartype is_data_v2: bool
:ivar asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
:vartype asset_scope_type: str or ~flow.models.AssetScopeTypes
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar module_node_id:
:vartype module_node_id: str
:ivar output_port_name:
:vartype output_port_name: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'modified_date': {'key': 'modifiedDate', 'type': 'iso-8601'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'saved_dataset_id': {'key': 'savedDatasetId', 'type': 'str'},
'dataset_version_id': {'key': 'datasetVersionId', 'type': 'str'},
'is_visible': {'key': 'isVisible', 'type': 'bool'},
'is_registered': {'key': 'isRegistered', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{object}'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'container_name': {'key': 'containerName', 'type': 'str'},
'data_storage_endpoint_uri': {'key': 'dataStorageEndpointUri', 'type': 'str'},
'workspace_sai_token': {'key': 'workspaceSaiToken', 'type': 'str'},
'aml_dataset_data_flow': {'key': 'amlDatasetDataFlow', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'arm_id': {'key': 'armId', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'asset_type': {'key': 'assetType', 'type': 'str'},
'is_data_v2': {'key': 'isDataV2', 'type': 'bool'},
'asset_scope_type': {'key': 'assetScopeType', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'output_port_name': {'key': 'outputPortName', 'type': 'str'},
}
def __init__(
self,
*,
feed_name: Optional[str] = None,
id: Optional[str] = None,
data_source_type: Optional[Union[str, "DataSourceType"]] = None,
name: Optional[str] = None,
description: Optional[str] = None,
data_type_id: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
modified_date: Optional[datetime.datetime] = None,
registered_by: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
created_by_studio: Optional[bool] = None,
data_reference_type: Optional[Union[str, "DataReferenceType"]] = None,
dataset_type: Optional[str] = None,
saved_dataset_id: Optional[str] = None,
dataset_version_id: Optional[str] = None,
is_visible: Optional[bool] = None,
is_registered: Optional[bool] = None,
properties: Optional[Dict[str, Any]] = None,
connection_string: Optional[str] = None,
container_name: Optional[str] = None,
data_storage_endpoint_uri: Optional[str] = None,
workspace_sai_token: Optional[str] = None,
aml_dataset_data_flow: Optional[str] = None,
system_data: Optional["SystemData"] = None,
arm_id: Optional[str] = None,
asset_id: Optional[str] = None,
asset_uri: Optional[str] = None,
asset_type: Optional[str] = None,
is_data_v2: Optional[bool] = None,
asset_scope_type: Optional[Union[str, "AssetScopeTypes"]] = None,
pipeline_run_id: Optional[str] = None,
module_node_id: Optional[str] = None,
output_port_name: Optional[str] = None,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword id:
:paramtype id: str
:keyword data_source_type: Possible values include: "None", "PipelineDataSource", "AmlDataset",
"GlobalDataset", "FeedModel", "FeedDataset", "AmlDataVersion", "AMLModelVersion".
:paramtype data_source_type: str or ~flow.models.DataSourceType
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword modified_date:
:paramtype modified_date: ~datetime.datetime
:keyword registered_by:
:paramtype registered_by: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype data_reference_type: str or ~flow.models.DataReferenceType
:keyword dataset_type:
:paramtype dataset_type: str
:keyword saved_dataset_id:
:paramtype saved_dataset_id: str
:keyword dataset_version_id:
:paramtype dataset_version_id: str
:keyword is_visible:
:paramtype is_visible: bool
:keyword is_registered:
:paramtype is_registered: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, any]
:keyword connection_string:
:paramtype connection_string: str
:keyword container_name:
:paramtype container_name: str
:keyword data_storage_endpoint_uri:
:paramtype data_storage_endpoint_uri: str
:keyword workspace_sai_token:
:paramtype workspace_sai_token: str
:keyword aml_dataset_data_flow:
:paramtype aml_dataset_data_flow: str
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword arm_id:
:paramtype arm_id: str
:keyword asset_id:
:paramtype asset_id: str
:keyword asset_uri:
:paramtype asset_uri: str
:keyword asset_type:
:paramtype asset_type: str
:keyword is_data_v2:
:paramtype is_data_v2: bool
:keyword asset_scope_type: Possible values include: "Workspace", "Global", "All", "Feed".
:paramtype asset_scope_type: str or ~flow.models.AssetScopeTypes
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword module_node_id:
:paramtype module_node_id: str
:keyword output_port_name:
:paramtype output_port_name: str
"""
super(DataInfo, self).__init__(**kwargs)
self.feed_name = feed_name
self.id = id
self.data_source_type = data_source_type
self.name = name
self.description = description
self.data_type_id = data_type_id
self.aml_data_store_name = aml_data_store_name
self.relative_path = relative_path
self.created_date = created_date
self.modified_date = modified_date
self.registered_by = registered_by
self.tags = tags
self.created_by_studio = created_by_studio
self.data_reference_type = data_reference_type
self.dataset_type = dataset_type
self.saved_dataset_id = saved_dataset_id
self.dataset_version_id = dataset_version_id
self.is_visible = is_visible
self.is_registered = is_registered
self.properties = properties
self.connection_string = connection_string
self.container_name = container_name
self.data_storage_endpoint_uri = data_storage_endpoint_uri
self.workspace_sai_token = workspace_sai_token
self.aml_dataset_data_flow = aml_dataset_data_flow
self.system_data = system_data
self.arm_id = arm_id
self.asset_id = asset_id
self.asset_uri = asset_uri
self.asset_type = asset_type
self.is_data_v2 = is_data_v2
self.asset_scope_type = asset_scope_type
self.pipeline_run_id = pipeline_run_id
self.module_node_id = module_node_id
self.output_port_name = output_port_name
class DataLocation(msrest.serialization.Model):
"""DataLocation.
:ivar storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:vartype storage_type: str or ~flow.models.DataLocationStorageType
:ivar storage_id:
:vartype storage_id: str
:ivar uri:
:vartype uri: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference:
:vartype data_reference: ~flow.models.DataReference
:ivar aml_dataset:
:vartype aml_dataset: ~flow.models.AmlDataset
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AssetDefinition
"""
_attribute_map = {
'storage_type': {'key': 'storageType', 'type': 'str'},
'storage_id': {'key': 'storageId', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference': {'key': 'dataReference', 'type': 'DataReference'},
'aml_dataset': {'key': 'amlDataset', 'type': 'AmlDataset'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
}
def __init__(
self,
*,
storage_type: Optional[Union[str, "DataLocationStorageType"]] = None,
storage_id: Optional[str] = None,
uri: Optional[str] = None,
data_store_name: Optional[str] = None,
data_reference: Optional["DataReference"] = None,
aml_dataset: Optional["AmlDataset"] = None,
asset_definition: Optional["AssetDefinition"] = None,
**kwargs
):
"""
:keyword storage_type: Possible values include: "None", "AzureBlob", "Artifact", "Snapshot",
"SavedAmlDataset", "Asset".
:paramtype storage_type: str or ~flow.models.DataLocationStorageType
:keyword storage_id:
:paramtype storage_id: str
:keyword uri:
:paramtype uri: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference:
:paramtype data_reference: ~flow.models.DataReference
:keyword aml_dataset:
:paramtype aml_dataset: ~flow.models.AmlDataset
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AssetDefinition
"""
super(DataLocation, self).__init__(**kwargs)
self.storage_type = storage_type
self.storage_id = storage_id
self.uri = uri
self.data_store_name = data_store_name
self.data_reference = data_reference
self.aml_dataset = aml_dataset
self.asset_definition = asset_definition
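# Example (editor's sketch, not generated code): DataLocation is a loose union;
# set storage_type and whichever payload field matches it. The URI format shown
# is an illustrative assumption.
#     loc = DataLocation(
#         storage_type="AzureBlob",
#         uri="azureml://datastores/workspaceblobstore/paths/outputs/",
#     )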
class DataPath(msrest.serialization.Model):
"""DataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar sql_data_path:
:vartype sql_data_path: ~flow.models.SqlDataPath
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
sql_data_path: Optional["SqlDataPath"] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword sql_data_path:
:paramtype sql_data_path: ~flow.models.SqlDataPath
"""
super(DataPath, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.relative_path = relative_path
self.sql_data_path = sql_data_path
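# Example (editor's sketch, not generated code): a DataPath names a datastore
# and a path within it; sql_data_path is only needed for SQL-backed stores.
# Values are placeholders.
#     path = DataPath(data_store_name="workspaceblobstore", relative_path="outputs/model/")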
class DataPathParameter(msrest.serialization.Model):
"""DataPathParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.LegacyDataPath
:ivar is_optional:
:vartype is_optional: bool
:ivar data_type_id:
:vartype data_type_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'LegacyDataPath'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
documentation: Optional[str] = None,
default_value: Optional["LegacyDataPath"] = None,
is_optional: Optional[bool] = None,
data_type_id: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.LegacyDataPath
:keyword is_optional:
:paramtype is_optional: bool
:keyword data_type_id:
:paramtype data_type_id: str
"""
super(DataPathParameter, self).__init__(**kwargs)
self.name = name
self.documentation = documentation
self.default_value = default_value
self.is_optional = is_optional
self.data_type_id = data_type_id
class DataPortDto(msrest.serialization.Model):
"""DataPortDto.
:ivar data_port_type: Possible values include: "Input", "Output".
:vartype data_port_type: str or ~flow.models.DataPortType
:ivar data_port_name:
:vartype data_port_name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
"ReadWrite".
:vartype data_store_intellectual_property_access_mode: str or
~flow.models.IntellectualPropertyAccessMode
:ivar data_store_intellectual_property_publisher:
:vartype data_store_intellectual_property_publisher: str
"""
_attribute_map = {
'data_port_type': {'key': 'dataPortType', 'type': 'str'},
'data_port_name': {'key': 'dataPortName', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_intellectual_property_access_mode': {'key': 'dataStoreIntellectualPropertyAccessMode', 'type': 'str'},
'data_store_intellectual_property_publisher': {'key': 'dataStoreIntellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
*,
data_port_type: Optional[Union[str, "DataPortType"]] = None,
data_port_name: Optional[str] = None,
data_store_name: Optional[str] = None,
data_store_intellectual_property_access_mode: Optional[Union[str, "IntellectualPropertyAccessMode"]] = None,
data_store_intellectual_property_publisher: Optional[str] = None,
**kwargs
):
"""
:keyword data_port_type: Possible values include: "Input", "Output".
:paramtype data_port_type: str or ~flow.models.DataPortType
:keyword data_port_name:
:paramtype data_port_name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_intellectual_property_access_mode: Possible values include: "ReadOnly",
"ReadWrite".
:paramtype data_store_intellectual_property_access_mode: str or
~flow.models.IntellectualPropertyAccessMode
:keyword data_store_intellectual_property_publisher:
:paramtype data_store_intellectual_property_publisher: str
"""
super(DataPortDto, self).__init__(**kwargs)
self.data_port_type = data_port_type
self.data_port_name = data_port_name
self.data_store_name = data_store_name
self.data_store_intellectual_property_access_mode = data_store_intellectual_property_access_mode
self.data_store_intellectual_property_publisher = data_store_intellectual_property_publisher
class DataReference(msrest.serialization.Model):
"""DataReference.
:ivar type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase",
"Custom", "Hdfs".
:vartype type: str or ~flow.models.DataReferenceType
:ivar azure_blob_reference:
:vartype azure_blob_reference: ~flow.models.AzureBlobReference
:ivar azure_data_lake_reference:
:vartype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
:ivar azure_files_reference:
:vartype azure_files_reference: ~flow.models.AzureFilesReference
:ivar azure_sql_database_reference:
:vartype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
:ivar azure_postgres_database_reference:
:vartype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
:ivar azure_data_lake_gen2_reference:
:vartype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
:ivar dbfs_reference:
:vartype dbfs_reference: ~flow.models.DBFSReference
:ivar azure_my_sql_database_reference:
:vartype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
:ivar custom_reference:
:vartype custom_reference: ~flow.models.CustomReference
:ivar hdfs_reference:
:vartype hdfs_reference: ~flow.models.HdfsReference
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'azure_blob_reference': {'key': 'azureBlobReference', 'type': 'AzureBlobReference'},
'azure_data_lake_reference': {'key': 'azureDataLakeReference', 'type': 'AzureDataLakeReference'},
'azure_files_reference': {'key': 'azureFilesReference', 'type': 'AzureFilesReference'},
'azure_sql_database_reference': {'key': 'azureSqlDatabaseReference', 'type': 'AzureDatabaseReference'},
'azure_postgres_database_reference': {'key': 'azurePostgresDatabaseReference', 'type': 'AzureDatabaseReference'},
'azure_data_lake_gen2_reference': {'key': 'azureDataLakeGen2Reference', 'type': 'AzureDataLakeGen2Reference'},
'dbfs_reference': {'key': 'dbfsReference', 'type': 'DBFSReference'},
'azure_my_sql_database_reference': {'key': 'azureMySqlDatabaseReference', 'type': 'AzureDatabaseReference'},
'custom_reference': {'key': 'customReference', 'type': 'CustomReference'},
'hdfs_reference': {'key': 'hdfsReference', 'type': 'HdfsReference'},
}
def __init__(
self,
*,
type: Optional[Union[str, "DataReferenceType"]] = None,
azure_blob_reference: Optional["AzureBlobReference"] = None,
azure_data_lake_reference: Optional["AzureDataLakeReference"] = None,
azure_files_reference: Optional["AzureFilesReference"] = None,
azure_sql_database_reference: Optional["AzureDatabaseReference"] = None,
azure_postgres_database_reference: Optional["AzureDatabaseReference"] = None,
azure_data_lake_gen2_reference: Optional["AzureDataLakeGen2Reference"] = None,
dbfs_reference: Optional["DBFSReference"] = None,
azure_my_sql_database_reference: Optional["AzureDatabaseReference"] = None,
custom_reference: Optional["CustomReference"] = None,
hdfs_reference: Optional["HdfsReference"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "None", "AzureBlob", "AzureDataLake", "AzureFiles",
"AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS", "AzureMySqlDatabase",
"Custom", "Hdfs".
:paramtype type: str or ~flow.models.DataReferenceType
:keyword azure_blob_reference:
:paramtype azure_blob_reference: ~flow.models.AzureBlobReference
:keyword azure_data_lake_reference:
:paramtype azure_data_lake_reference: ~flow.models.AzureDataLakeReference
:keyword azure_files_reference:
:paramtype azure_files_reference: ~flow.models.AzureFilesReference
:keyword azure_sql_database_reference:
:paramtype azure_sql_database_reference: ~flow.models.AzureDatabaseReference
:keyword azure_postgres_database_reference:
:paramtype azure_postgres_database_reference: ~flow.models.AzureDatabaseReference
:keyword azure_data_lake_gen2_reference:
:paramtype azure_data_lake_gen2_reference: ~flow.models.AzureDataLakeGen2Reference
:keyword dbfs_reference:
:paramtype dbfs_reference: ~flow.models.DBFSReference
:keyword azure_my_sql_database_reference:
:paramtype azure_my_sql_database_reference: ~flow.models.AzureDatabaseReference
:keyword custom_reference:
:paramtype custom_reference: ~flow.models.CustomReference
:keyword hdfs_reference:
:paramtype hdfs_reference: ~flow.models.HdfsReference
"""
super(DataReference, self).__init__(**kwargs)
self.type = type
self.azure_blob_reference = azure_blob_reference
self.azure_data_lake_reference = azure_data_lake_reference
self.azure_files_reference = azure_files_reference
self.azure_sql_database_reference = azure_sql_database_reference
self.azure_postgres_database_reference = azure_postgres_database_reference
self.azure_data_lake_gen2_reference = azure_data_lake_gen2_reference
self.dbfs_reference = dbfs_reference
self.azure_my_sql_database_reference = azure_my_sql_database_reference
self.custom_reference = custom_reference
self.hdfs_reference = hdfs_reference
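# --- Editor's sketch (commented out): DataReference is a discriminated-union-style
# container -- ``type`` selects which storage-specific reference field is meaningful.
# A minimal blob-backed reference, leaving the per-storage models (defined elsewhere
# in this module) unset:
#
#     ref = DataReference(type="AzureBlob")
#     ref.serialize()   # -> {'type': 'AzureBlob'}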
class DataReferenceConfiguration(msrest.serialization.Model):
"""DataReferenceConfiguration.
:ivar data_store_name:
:vartype data_store_name: str
:ivar mode: Possible values include: "Mount", "Download", "Upload".
:vartype mode: str or ~flow.models.DataStoreMode
:ivar path_on_data_store:
:vartype path_on_data_store: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'path_on_data_store': {'key': 'pathOnDataStore', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
mode: Optional[Union[str, "DataStoreMode"]] = None,
path_on_data_store: Optional[str] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword mode: Possible values include: "Mount", "Download", "Upload".
:paramtype mode: str or ~flow.models.DataStoreMode
:keyword path_on_data_store:
:paramtype path_on_data_store: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
"""
super(DataReferenceConfiguration, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.mode = mode
self.path_on_data_store = path_on_data_store
self.path_on_compute = path_on_compute
self.overwrite = overwrite
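# --- Editor's sketch (commented out): mounting a datastore path onto the compute.
# The datastore name and path are placeholders.
#
#     cfg = DataReferenceConfiguration(
#         data_store_name="workspaceblobstore",
#         mode="Mount",                        # one of "Mount", "Download", "Upload"
#         path_on_data_store="raw/input.csv",
#     )
#     cfg.serialize()
#     # -> {'dataStoreName': 'workspaceblobstore', 'mode': 'Mount',
#     #     'pathOnDataStore': 'raw/input.csv'}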
class DataSetDefinition(msrest.serialization.Model):
"""DataSetDefinition.
:ivar data_type_short_name:
:vartype data_type_short_name: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar value:
:vartype value: ~flow.models.DataSetDefinitionValue
"""
_attribute_map = {
'data_type_short_name': {'key': 'dataTypeShortName', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'value': {'key': 'value', 'type': 'DataSetDefinitionValue'},
}
def __init__(
self,
*,
data_type_short_name: Optional[str] = None,
parameter_name: Optional[str] = None,
value: Optional["DataSetDefinitionValue"] = None,
**kwargs
):
"""
:keyword data_type_short_name:
:paramtype data_type_short_name: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword value:
:paramtype value: ~flow.models.DataSetDefinitionValue
"""
super(DataSetDefinition, self).__init__(**kwargs)
self.data_type_short_name = data_type_short_name
self.parameter_name = parameter_name
self.value = value
class DataSetDefinitionValue(msrest.serialization.Model):
"""DataSetDefinitionValue.
:ivar literal_value:
:vartype literal_value: ~flow.models.DataPath
:ivar data_set_reference:
:vartype data_set_reference: ~flow.models.RegisteredDataSetReference
:ivar saved_data_set_reference:
:vartype saved_data_set_reference: ~flow.models.SavedDataSetReference
:ivar asset_definition:
:vartype asset_definition: ~flow.models.AssetDefinition
"""
_attribute_map = {
'literal_value': {'key': 'literalValue', 'type': 'DataPath'},
'data_set_reference': {'key': 'dataSetReference', 'type': 'RegisteredDataSetReference'},
'saved_data_set_reference': {'key': 'savedDataSetReference', 'type': 'SavedDataSetReference'},
'asset_definition': {'key': 'assetDefinition', 'type': 'AssetDefinition'},
}
def __init__(
self,
*,
literal_value: Optional["DataPath"] = None,
data_set_reference: Optional["RegisteredDataSetReference"] = None,
saved_data_set_reference: Optional["SavedDataSetReference"] = None,
asset_definition: Optional["AssetDefinition"] = None,
**kwargs
):
"""
:keyword literal_value:
:paramtype literal_value: ~flow.models.DataPath
:keyword data_set_reference:
:paramtype data_set_reference: ~flow.models.RegisteredDataSetReference
:keyword saved_data_set_reference:
:paramtype saved_data_set_reference: ~flow.models.SavedDataSetReference
:keyword asset_definition:
:paramtype asset_definition: ~flow.models.AssetDefinition
"""
super(DataSetDefinitionValue, self).__init__(**kwargs)
self.literal_value = literal_value
self.data_set_reference = data_set_reference
self.saved_data_set_reference = saved_data_set_reference
self.asset_definition = asset_definition
class DatasetIdentifier(msrest.serialization.Model):
"""DatasetIdentifier.
:ivar saved_id:
:vartype saved_id: str
:ivar registered_id:
:vartype registered_id: str
:ivar registered_version:
:vartype registered_version: str
"""
_attribute_map = {
'saved_id': {'key': 'savedId', 'type': 'str'},
'registered_id': {'key': 'registeredId', 'type': 'str'},
'registered_version': {'key': 'registeredVersion', 'type': 'str'},
}
def __init__(
self,
*,
saved_id: Optional[str] = None,
registered_id: Optional[str] = None,
registered_version: Optional[str] = None,
**kwargs
):
"""
:keyword saved_id:
:paramtype saved_id: str
:keyword registered_id:
:paramtype registered_id: str
:keyword registered_version:
:paramtype registered_version: str
"""
super(DatasetIdentifier, self).__init__(**kwargs)
self.saved_id = saved_id
self.registered_id = registered_id
self.registered_version = registered_version
class DatasetInputDetails(msrest.serialization.Model):
"""DatasetInputDetails.
:ivar input_name:
:vartype input_name: str
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DatasetDeliveryMechanism
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'input_name': {'key': 'inputName', 'type': 'str'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
*,
input_name: Optional[str] = None,
mechanism: Optional[Union[str, "DatasetDeliveryMechanism"]] = None,
path_on_compute: Optional[str] = None,
**kwargs
):
"""
:keyword input_name:
:paramtype input_name: str
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DatasetDeliveryMechanism
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(DatasetInputDetails, self).__init__(**kwargs)
self.input_name = input_name
self.mechanism = mechanism
self.path_on_compute = path_on_compute
class DatasetLineage(msrest.serialization.Model):
"""DatasetLineage.
:ivar identifier:
:vartype identifier: ~flow.models.DatasetIdentifier
:ivar consumption_type: Possible values include: "RunInput", "Reference".
:vartype consumption_type: str or ~flow.models.DatasetConsumptionType
:ivar input_details:
:vartype input_details: ~flow.models.DatasetInputDetails
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
'consumption_type': {'key': 'consumptionType', 'type': 'str'},
'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'},
}
def __init__(
self,
*,
identifier: Optional["DatasetIdentifier"] = None,
consumption_type: Optional[Union[str, "DatasetConsumptionType"]] = None,
input_details: Optional["DatasetInputDetails"] = None,
**kwargs
):
"""
:keyword identifier:
:paramtype identifier: ~flow.models.DatasetIdentifier
:keyword consumption_type: Possible values include: "RunInput", "Reference".
:paramtype consumption_type: str or ~flow.models.DatasetConsumptionType
:keyword input_details:
:paramtype input_details: ~flow.models.DatasetInputDetails
"""
super(DatasetLineage, self).__init__(**kwargs)
self.identifier = identifier
self.consumption_type = consumption_type
self.input_details = input_details
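# --- Editor's sketch (commented out): DatasetLineage composes the two models defined
# just above. The IDs and names are fabricated placeholders.
#
#     lineage = DatasetLineage(
#         identifier=DatasetIdentifier(registered_id="dataset-123", registered_version="2"),
#         consumption_type="RunInput",
#         input_details=DatasetInputDetails(input_name="training_data", mechanism="Mount"),
#     )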
class DatasetOutput(msrest.serialization.Model):
"""DatasetOutput.
:ivar dataset_type: Possible values include: "File", "Tabular".
:vartype dataset_type: str or ~flow.models.DatasetType
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.DatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.DatasetOutputOptions
"""
_attribute_map = {
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
}
def __init__(
self,
*,
dataset_type: Optional[Union[str, "DatasetType"]] = None,
dataset_registration: Optional["DatasetRegistration"] = None,
dataset_output_options: Optional["DatasetOutputOptions"] = None,
**kwargs
):
"""
:keyword dataset_type: Possible values include: "File", "Tabular".
:paramtype dataset_type: str or ~flow.models.DatasetType
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.DatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
"""
super(DatasetOutput, self).__init__(**kwargs)
self.dataset_type = dataset_type
self.dataset_registration = dataset_registration
self.dataset_output_options = dataset_output_options
class DatasetOutputDetails(msrest.serialization.Model):
"""DatasetOutputDetails.
:ivar output_name:
:vartype output_name: str
"""
_attribute_map = {
'output_name': {'key': 'outputName', 'type': 'str'},
}
def __init__(
self,
*,
output_name: Optional[str] = None,
**kwargs
):
"""
:keyword output_name:
:paramtype output_name: str
"""
super(DatasetOutputDetails, self).__init__(**kwargs)
self.output_name = output_name
class DatasetOutputOptions(msrest.serialization.Model):
"""DatasetOutputOptions.
:ivar source_globs:
:vartype source_globs: ~flow.models.GlobsOptions
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_datastore_parameter_assignment:
:vartype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'source_globs': {'key': 'sourceGlobs', 'type': 'GlobsOptions'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_datastore_parameter_assignment': {'key': 'PathOnDatastoreParameterAssignment', 'type': 'ParameterAssignment'},
}
def __init__(
self,
*,
source_globs: Optional["GlobsOptions"] = None,
path_on_datastore: Optional[str] = None,
path_on_datastore_parameter_assignment: Optional["ParameterAssignment"] = None,
**kwargs
):
"""
:keyword source_globs:
:paramtype source_globs: ~flow.models.GlobsOptions
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_datastore_parameter_assignment:
:paramtype path_on_datastore_parameter_assignment: ~flow.models.ParameterAssignment
"""
super(DatasetOutputOptions, self).__init__(**kwargs)
self.source_globs = source_globs
self.path_on_datastore = path_on_datastore
self.path_on_datastore_parameter_assignment = path_on_datastore_parameter_assignment
class DataSetPathParameter(msrest.serialization.Model):
"""DataSetPathParameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: ~flow.models.DataSetDefinitionValue
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'DataSetDefinitionValue'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
documentation: Optional[str] = None,
default_value: Optional["DataSetDefinitionValue"] = None,
is_optional: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: ~flow.models.DataSetDefinitionValue
:keyword is_optional:
:paramtype is_optional: bool
"""
super(DataSetPathParameter, self).__init__(**kwargs)
self.name = name
self.documentation = documentation
self.default_value = default_value
self.is_optional = is_optional
class DatasetRegistration(msrest.serialization.Model):
"""DatasetRegistration.
:ivar name:
:vartype name: str
:ivar create_new_version:
:vartype create_new_version: bool
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'create_new_version': {'key': 'createNewVersion', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
create_new_version: Optional[bool] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword create_new_version:
:paramtype create_new_version: bool
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(DatasetRegistration, self).__init__(**kwargs)
self.name = name
self.create_new_version = create_new_version
self.description = description
self.tags = tags
self.additional_transformations = additional_transformations
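# --- Editor's sketch (commented out): msrest models can also be rebuilt from a
# wire-format payload via the inherited ``deserialize`` classmethod; note the keys
# are the camelCase names from ``_attribute_map``, not the Python attribute names.
#
#     reg = DatasetRegistration.deserialize(
#         {"name": "my_dataset", "createNewVersion": True, "tags": {"stage": "dev"}}
#     )
#     reg.create_new_version   # -> True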
class DatasetRegistrationOptions(msrest.serialization.Model):
"""DatasetRegistrationOptions.
:ivar additional_transformation:
:vartype additional_transformation: str
"""
_attribute_map = {
'additional_transformation': {'key': 'additionalTransformation', 'type': 'str'},
}
def __init__(
self,
*,
additional_transformation: Optional[str] = None,
**kwargs
):
"""
:keyword additional_transformation:
:paramtype additional_transformation: str
"""
super(DatasetRegistrationOptions, self).__init__(**kwargs)
self.additional_transformation = additional_transformation
class DataSettings(msrest.serialization.Model):
"""DataSettings.
:ivar target_column_name:
:vartype target_column_name: str
:ivar weight_column_name:
:vartype weight_column_name: str
:ivar positive_label:
:vartype positive_label: str
:ivar validation_data:
:vartype validation_data: ~flow.models.ValidationDataSettings
:ivar test_data:
:vartype test_data: ~flow.models.TestDataSettings
"""
_attribute_map = {
'target_column_name': {'key': 'targetColumnName', 'type': 'str'},
'weight_column_name': {'key': 'weightColumnName', 'type': 'str'},
'positive_label': {'key': 'positiveLabel', 'type': 'str'},
'validation_data': {'key': 'validationData', 'type': 'ValidationDataSettings'},
'test_data': {'key': 'testData', 'type': 'TestDataSettings'},
}
def __init__(
self,
*,
target_column_name: Optional[str] = None,
weight_column_name: Optional[str] = None,
positive_label: Optional[str] = None,
validation_data: Optional["ValidationDataSettings"] = None,
test_data: Optional["TestDataSettings"] = None,
**kwargs
):
"""
:keyword target_column_name:
:paramtype target_column_name: str
:keyword weight_column_name:
:paramtype weight_column_name: str
:keyword positive_label:
:paramtype positive_label: str
:keyword validation_data:
:paramtype validation_data: ~flow.models.ValidationDataSettings
:keyword test_data:
:paramtype test_data: ~flow.models.TestDataSettings
"""
super(DataSettings, self).__init__(**kwargs)
self.target_column_name = target_column_name
self.weight_column_name = weight_column_name
self.positive_label = positive_label
self.validation_data = validation_data
self.test_data = test_data
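# --- Editor's sketch (commented out): only the scalar fields are shown here;
# ``validation_data``/``test_data`` take the ValidationDataSettings and
# TestDataSettings models defined elsewhere in this module. Column names are
# placeholders.
#
#     settings = DataSettings(
#         target_column_name="label",
#         weight_column_name="sample_weight",
#         positive_label="1",
#     )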
class DatastoreSetting(msrest.serialization.Model):
"""DatastoreSetting.
:ivar data_store_name:
:vartype data_store_name: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
"""
super(DatastoreSetting, self).__init__(**kwargs)
self.data_store_name = data_store_name
class DataTransferCloudConfiguration(msrest.serialization.Model):
"""DataTransferCloudConfiguration.
:ivar allow_overwrite:
:vartype allow_overwrite: bool
"""
_attribute_map = {
'allow_overwrite': {'key': 'AllowOverwrite', 'type': 'bool'},
}
def __init__(
self,
*,
allow_overwrite: Optional[bool] = None,
**kwargs
):
"""
:keyword allow_overwrite:
:paramtype allow_overwrite: bool
"""
super(DataTransferCloudConfiguration, self).__init__(**kwargs)
self.allow_overwrite = allow_overwrite
class DataTransferSink(msrest.serialization.Model):
"""DataTransferSink.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.DataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.FileSystem
:ivar database_sink:
:vartype database_sink: ~flow.models.DatabaseSink
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
'database_sink': {'key': 'databaseSink', 'type': 'DatabaseSink'},
}
def __init__(
self,
*,
type: Optional[Union[str, "DataTransferStorageType"]] = None,
file_system: Optional["FileSystem"] = None,
database_sink: Optional["DatabaseSink"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.DataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.FileSystem
:keyword database_sink:
:paramtype database_sink: ~flow.models.DatabaseSink
"""
super(DataTransferSink, self).__init__(**kwargs)
self.type = type
self.file_system = file_system
self.database_sink = database_sink
class DataTransferSource(msrest.serialization.Model):
"""DataTransferSource.
:ivar type: Possible values include: "DataBase", "FileSystem".
:vartype type: str or ~flow.models.DataTransferStorageType
:ivar file_system:
:vartype file_system: ~flow.models.FileSystem
:ivar database_source:
:vartype database_source: ~flow.models.DatabaseSource
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'file_system': {'key': 'fileSystem', 'type': 'FileSystem'},
'database_source': {'key': 'databaseSource', 'type': 'DatabaseSource'},
}
def __init__(
self,
*,
type: Optional[Union[str, "DataTransferStorageType"]] = None,
file_system: Optional["FileSystem"] = None,
database_source: Optional["DatabaseSource"] = None,
**kwargs
):
"""
:keyword type: Possible values include: "DataBase", "FileSystem".
:paramtype type: str or ~flow.models.DataTransferStorageType
:keyword file_system:
:paramtype file_system: ~flow.models.FileSystem
:keyword database_source:
:paramtype database_source: ~flow.models.DatabaseSource
"""
super(DataTransferSource, self).__init__(**kwargs)
self.type = type
self.file_system = file_system
self.database_source = database_source
class DataTransferV2CloudSetting(msrest.serialization.Model):
"""DataTransferV2CloudSetting.
:ivar task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:vartype task_type: str or ~flow.models.DataTransferTaskType
:ivar compute_name:
:vartype compute_name: str
:ivar copy_data_task:
:vartype copy_data_task: ~flow.models.CopyDataTask
:ivar import_data_task:
:vartype import_data_task: ~flow.models.ImportDataTask
:ivar export_data_task:
:vartype export_data_task: ~flow.models.ExportDataTask
:ivar data_transfer_sources: This is a dictionary.
:vartype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
:ivar data_transfer_sinks: This is a dictionary.
:vartype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
:ivar data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:vartype data_copy_mode: str or ~flow.models.DataCopyMode
"""
_attribute_map = {
'task_type': {'key': 'taskType', 'type': 'str'},
'compute_name': {'key': 'ComputeName', 'type': 'str'},
'copy_data_task': {'key': 'CopyDataTask', 'type': 'CopyDataTask'},
'import_data_task': {'key': 'ImportDataTask', 'type': 'ImportDataTask'},
'export_data_task': {'key': 'ExportDataTask', 'type': 'ExportDataTask'},
'data_transfer_sources': {'key': 'DataTransferSources', 'type': '{DataTransferSource}'},
'data_transfer_sinks': {'key': 'DataTransferSinks', 'type': '{DataTransferSink}'},
'data_copy_mode': {'key': 'DataCopyMode', 'type': 'str'},
}
def __init__(
self,
*,
task_type: Optional[Union[str, "DataTransferTaskType"]] = None,
compute_name: Optional[str] = None,
copy_data_task: Optional["CopyDataTask"] = None,
import_data_task: Optional["ImportDataTask"] = None,
export_data_task: Optional["ExportDataTask"] = None,
data_transfer_sources: Optional[Dict[str, "DataTransferSource"]] = None,
data_transfer_sinks: Optional[Dict[str, "DataTransferSink"]] = None,
data_copy_mode: Optional[Union[str, "DataCopyMode"]] = None,
**kwargs
):
"""
:keyword task_type: Possible values include: "ImportData", "ExportData", "CopyData".
:paramtype task_type: str or ~flow.models.DataTransferTaskType
:keyword compute_name:
:paramtype compute_name: str
:keyword copy_data_task:
:paramtype copy_data_task: ~flow.models.CopyDataTask
:keyword import_data_task:
:paramtype import_data_task: ~flow.models.ImportDataTask
:keyword export_data_task:
:paramtype export_data_task: ~flow.models.ExportDataTask
:keyword data_transfer_sources: This is a dictionary.
:paramtype data_transfer_sources: dict[str, ~flow.models.DataTransferSource]
:keyword data_transfer_sinks: This is a dictionary.
:paramtype data_transfer_sinks: dict[str, ~flow.models.DataTransferSink]
:keyword data_copy_mode: Possible values include: "MergeWithOverwrite", "FailIfConflict".
:paramtype data_copy_mode: str or ~flow.models.DataCopyMode
"""
super(DataTransferV2CloudSetting, self).__init__(**kwargs)
self.task_type = task_type
self.compute_name = compute_name
self.copy_data_task = copy_data_task
self.import_data_task = import_data_task
self.export_data_task = export_data_task
self.data_transfer_sources = data_transfer_sources
self.data_transfer_sinks = data_transfer_sinks
self.data_copy_mode = data_copy_mode
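# --- Editor's sketch (commented out): a copy-data task wiring one named source to
# one named sink, using the DataTransferSource/DataTransferSink models above. The
# "source"/"sink" dictionary keys and compute name are arbitrary placeholders.
#
#     setting = DataTransferV2CloudSetting(
#         task_type="CopyData",
#         compute_name="data-transfer-compute",
#         data_transfer_sources={"source": DataTransferSource(type="FileSystem")},
#         data_transfer_sinks={"sink": DataTransferSink(type="FileSystem")},
#         data_copy_mode="MergeWithOverwrite",
#     )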
class DataTypeCreationInfo(msrest.serialization.Model):
"""DataTypeCreationInfo.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar is_directory:
:vartype is_directory: bool
:ivar file_extension:
:vartype file_extension: str
:ivar parent_data_type_ids:
:vartype parent_data_type_ids: list[str]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_directory': {'key': 'isDirectory', 'type': 'bool'},
'file_extension': {'key': 'fileExtension', 'type': 'str'},
'parent_data_type_ids': {'key': 'parentDataTypeIds', 'type': '[str]'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
is_directory: Optional[bool] = None,
file_extension: Optional[str] = None,
parent_data_type_ids: Optional[List[str]] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword is_directory:
:paramtype is_directory: bool
:keyword file_extension:
:paramtype file_extension: str
:keyword parent_data_type_ids:
:paramtype parent_data_type_ids: list[str]
"""
super(DataTypeCreationInfo, self).__init__(**kwargs)
self.id = id
self.name = name
self.description = description
self.is_directory = is_directory
self.file_extension = file_extension
self.parent_data_type_ids = parent_data_type_ids
class DBFSReference(msrest.serialization.Model):
"""DBFSReference.
:ivar relative_path:
:vartype relative_path: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
"""
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
}
def __init__(
self,
*,
relative_path: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
**kwargs
):
"""
:keyword relative_path:
:paramtype relative_path: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
"""
super(DBFSReference, self).__init__(**kwargs)
self.relative_path = relative_path
self.aml_data_store_name = aml_data_store_name
class DbfsStorageInfoDto(msrest.serialization.Model):
"""DbfsStorageInfoDto.
:ivar destination:
:vartype destination: str
"""
_attribute_map = {
'destination': {'key': 'destination', 'type': 'str'},
}
def __init__(
self,
*,
destination: Optional[str] = None,
**kwargs
):
"""
:keyword destination:
:paramtype destination: str
"""
super(DbfsStorageInfoDto, self).__init__(**kwargs)
self.destination = destination
class DebugInfoResponse(msrest.serialization.Model):
"""Internal debugging information not intended for external clients.
:ivar type: The type.
:vartype type: str
:ivar message: The message.
:vartype message: str
:ivar stack_trace: The stack trace.
:vartype stack_trace: str
:ivar inner_exception: Internal debugging information not intended for external clients.
:vartype inner_exception: ~flow.models.DebugInfoResponse
:ivar data: This is a dictionary.
:vartype data: dict[str, any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'stack_trace': {'key': 'stackTrace', 'type': 'str'},
'inner_exception': {'key': 'innerException', 'type': 'DebugInfoResponse'},
'data': {'key': 'data', 'type': '{object}'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
}
def __init__(
self,
*,
type: Optional[str] = None,
message: Optional[str] = None,
stack_trace: Optional[str] = None,
inner_exception: Optional["DebugInfoResponse"] = None,
data: Optional[Dict[str, Any]] = None,
error_response: Optional["ErrorResponse"] = None,
**kwargs
):
"""
:keyword type: The type.
:paramtype type: str
:keyword message: The message.
:paramtype message: str
:keyword stack_trace: The stack trace.
:paramtype stack_trace: str
:keyword inner_exception: Internal debugging information not intended for external clients.
:paramtype inner_exception: ~flow.models.DebugInfoResponse
:keyword data: This is a dictionary.
:paramtype data: dict[str, any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
"""
super(DebugInfoResponse, self).__init__(**kwargs)
self.type = type
self.message = message
self.stack_trace = stack_trace
self.inner_exception = inner_exception
self.data = data
self.error_response = error_response
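# --- Editor's sketch (commented out): DebugInfoResponse is self-referential via
# ``inner_exception``, so a nested exception chain deserializes into a linked chain
# of models that can be walked like this:
#
#     def innermost(info):
#         # Follow inner_exception links to the root cause.
#         while info.inner_exception is not None:
#             info = info.inner_exception
#         return info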
class DeployFlowRequest(msrest.serialization.Model):
"""DeployFlowRequest.
:ivar source_resource_id:
:vartype source_resource_id: str
:ivar source_flow_run_id:
:vartype source_flow_run_id: str
:ivar source_flow_id:
:vartype source_flow_id: str
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar output_names_included_in_endpoint_response:
:vartype output_names_included_in_endpoint_response: list[str]
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar endpoint_description:
:vartype endpoint_description: str
:ivar auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
:vartype auth_mode: str or ~flow.models.EndpointAuthMode
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar endpoint_tags: This is a dictionary.
:vartype endpoint_tags: dict[str, str]
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar use_workspace_connection:
:vartype use_workspace_connection: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar environment:
:vartype environment: str
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar deployment_tags: This is a dictionary.
:vartype deployment_tags: dict[str, str]
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar enable_model_data_collector:
:vartype enable_model_data_collector: bool
:ivar skip_update_traffic_to_full:
:vartype skip_update_traffic_to_full: bool
:ivar enable_streaming_response:
:vartype enable_streaming_response: bool
:ivar use_flow_snapshot_to_deploy:
:vartype use_flow_snapshot_to_deploy: bool
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar auto_grant_connection_permission:
:vartype auto_grant_connection_permission: bool
"""
_attribute_map = {
'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
'source_flow_run_id': {'key': 'sourceFlowRunId', 'type': 'str'},
'source_flow_id': {'key': 'sourceFlowId', 'type': 'str'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'output_names_included_in_endpoint_response': {'key': 'outputNamesIncludedInEndpointResponse', 'type': '[str]'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'endpoint_description': {'key': 'endpointDescription', 'type': 'str'},
'auth_mode': {'key': 'authMode', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'endpoint_tags': {'key': 'endpointTags', 'type': '{str}'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'deployment_tags': {'key': 'deploymentTags', 'type': '{str}'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'enable_model_data_collector': {'key': 'enableModelDataCollector', 'type': 'bool'},
'skip_update_traffic_to_full': {'key': 'skipUpdateTrafficToFull', 'type': 'bool'},
'enable_streaming_response': {'key': 'enableStreamingResponse', 'type': 'bool'},
'use_flow_snapshot_to_deploy': {'key': 'useFlowSnapshotToDeploy', 'type': 'bool'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'auto_grant_connection_permission': {'key': 'autoGrantConnectionPermission', 'type': 'bool'},
}
def __init__(
self,
*,
source_resource_id: Optional[str] = None,
source_flow_run_id: Optional[str] = None,
source_flow_id: Optional[str] = None,
flow: Optional["Flow"] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
flow_submit_run_settings: Optional["FlowSubmitRunSettings"] = None,
output_names_included_in_endpoint_response: Optional[List[str]] = None,
endpoint_name: Optional[str] = None,
endpoint_description: Optional[str] = None,
auth_mode: Optional[Union[str, "EndpointAuthMode"]] = None,
identity: Optional["ManagedServiceIdentity"] = None,
endpoint_tags: Optional[Dict[str, str]] = None,
connection_overrides: Optional[List["ConnectionOverrideSetting"]] = None,
use_workspace_connection: Optional[bool] = None,
deployment_name: Optional[str] = None,
environment: Optional[str] = None,
environment_variables: Optional[Dict[str, str]] = None,
deployment_tags: Optional[Dict[str, str]] = None,
app_insights_enabled: Optional[bool] = None,
enable_model_data_collector: Optional[bool] = None,
skip_update_traffic_to_full: Optional[bool] = None,
enable_streaming_response: Optional[bool] = None,
use_flow_snapshot_to_deploy: Optional[bool] = None,
instance_type: Optional[str] = None,
instance_count: Optional[int] = None,
auto_grant_connection_permission: Optional[bool] = None,
**kwargs
):
"""
:keyword source_resource_id:
:paramtype source_resource_id: str
:keyword source_flow_run_id:
:paramtype source_flow_run_id: str
:keyword source_flow_id:
:paramtype source_flow_id: str
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword output_names_included_in_endpoint_response:
:paramtype output_names_included_in_endpoint_response: list[str]
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword endpoint_description:
:paramtype endpoint_description: str
:keyword auth_mode: Possible values include: "AMLToken", "Key", "AADToken".
:paramtype auth_mode: str or ~flow.models.EndpointAuthMode
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword endpoint_tags: This is a dictionary.
:paramtype endpoint_tags: dict[str, str]
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword use_workspace_connection:
:paramtype use_workspace_connection: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword environment:
:paramtype environment: str
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword deployment_tags: This is a dictionary.
:paramtype deployment_tags: dict[str, str]
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword enable_model_data_collector:
:paramtype enable_model_data_collector: bool
:keyword skip_update_traffic_to_full:
:paramtype skip_update_traffic_to_full: bool
:keyword enable_streaming_response:
:paramtype enable_streaming_response: bool
:keyword use_flow_snapshot_to_deploy:
:paramtype use_flow_snapshot_to_deploy: bool
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword auto_grant_connection_permission:
:paramtype auto_grant_connection_permission: bool
"""
super(DeployFlowRequest, self).__init__(**kwargs)
self.source_resource_id = source_resource_id
self.source_flow_run_id = source_flow_run_id
self.source_flow_id = source_flow_id
self.flow = flow
self.flow_type = flow_type
self.flow_submit_run_settings = flow_submit_run_settings
self.output_names_included_in_endpoint_response = output_names_included_in_endpoint_response
self.endpoint_name = endpoint_name
self.endpoint_description = endpoint_description
self.auth_mode = auth_mode
self.identity = identity
self.endpoint_tags = endpoint_tags
self.connection_overrides = connection_overrides
self.use_workspace_connection = use_workspace_connection
self.deployment_name = deployment_name
self.environment = environment
self.environment_variables = environment_variables
self.deployment_tags = deployment_tags
self.app_insights_enabled = app_insights_enabled
self.enable_model_data_collector = enable_model_data_collector
self.skip_update_traffic_to_full = skip_update_traffic_to_full
self.enable_streaming_response = enable_streaming_response
self.use_flow_snapshot_to_deploy = use_flow_snapshot_to_deploy
self.instance_type = instance_type
self.instance_count = instance_count
self.auto_grant_connection_permission = auto_grant_connection_permission
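# --- Editor's sketch (commented out): a minimal deployment request. Every value
# here is a placeholder; in practice this model is assembled and submitted by the
# service client operations rather than built by hand.
#
#     request = DeployFlowRequest(
#         source_flow_id="<flow-id>",
#         endpoint_name="my-flow-endpoint",
#         auth_mode="Key",                  # "AMLToken", "Key", or "AADToken"
#         deployment_name="blue",
#         instance_type="Standard_DS3_v2",  # assumed SKU name, for illustration only
#         instance_count=1,
#         app_insights_enabled=True,
#     )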
class DeploymentInfo(msrest.serialization.Model):
"""DeploymentInfo.
:ivar operation_id:
:vartype operation_id: str
:ivar service_id:
:vartype service_id: str
:ivar service_name:
:vartype service_name: str
:ivar status_detail:
:vartype status_detail: str
"""
_attribute_map = {
'operation_id': {'key': 'operationId', 'type': 'str'},
'service_id': {'key': 'serviceId', 'type': 'str'},
'service_name': {'key': 'serviceName', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
}
def __init__(
self,
*,
operation_id: Optional[str] = None,
service_id: Optional[str] = None,
service_name: Optional[str] = None,
status_detail: Optional[str] = None,
**kwargs
):
"""
:keyword operation_id:
:paramtype operation_id: str
:keyword service_id:
:paramtype service_id: str
:keyword service_name:
:paramtype service_name: str
:keyword status_detail:
:paramtype status_detail: str
"""
super(DeploymentInfo, self).__init__(**kwargs)
self.operation_id = operation_id
self.service_id = service_id
self.service_name = service_name
self.status_detail = status_detail
class DistributionConfiguration(msrest.serialization.Model):
"""DistributionConfiguration.
:ivar distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
:vartype distribution_type: str or ~flow.models.DistributionType
"""
_attribute_map = {
'distribution_type': {'key': 'distributionType', 'type': 'str'},
}
def __init__(
self,
*,
distribution_type: Optional[Union[str, "DistributionType"]] = None,
**kwargs
):
"""
:keyword distribution_type: Possible values include: "PyTorch", "TensorFlow", "Mpi", "Ray".
:paramtype distribution_type: str or ~flow.models.DistributionType
"""
super(DistributionConfiguration, self).__init__(**kwargs)
self.distribution_type = distribution_type
class DistributionParameter(msrest.serialization.Model):
"""DistributionParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar description:
:vartype description: str
:ivar input_type: Possible values include: "Text", "Number".
:vartype input_type: str or ~flow.models.DistributionParameterEnum
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'input_type': {'key': 'inputType', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
description: Optional[str] = None,
input_type: Optional[Union[str, "DistributionParameterEnum"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword description:
:paramtype description: str
:keyword input_type: Possible values include: "Text", "Number".
:paramtype input_type: str or ~flow.models.DistributionParameterEnum
"""
super(DistributionParameter, self).__init__(**kwargs)
self.name = name
self.label = label
self.description = description
self.input_type = input_type
class DockerBuildContext(msrest.serialization.Model):
"""DockerBuildContext.
:ivar location_type: Possible values include: "Git", "StorageAccount".
:vartype location_type: str or ~flow.models.BuildContextLocationType
:ivar location:
:vartype location: str
:ivar dockerfile_path:
:vartype dockerfile_path: str
"""
_attribute_map = {
'location_type': {'key': 'locationType', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'},
}
def __init__(
self,
*,
location_type: Optional[Union[str, "BuildContextLocationType"]] = None,
location: Optional[str] = None,
dockerfile_path: Optional[str] = "Dockerfile",
**kwargs
):
"""
:keyword location_type: Possible values include: "Git", "StorageAccount".
:paramtype location_type: str or ~flow.models.BuildContextLocationType
:keyword location:
:paramtype location: str
:keyword dockerfile_path:
:paramtype dockerfile_path: str
"""
super(DockerBuildContext, self).__init__(**kwargs)
self.location_type = location_type
self.location = location
self.dockerfile_path = dockerfile_path
class DockerConfiguration(msrest.serialization.Model):
"""DockerConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
*,
use_docker: Optional[bool] = None,
shared_volumes: Optional[bool] = None,
arguments: Optional[List[str]] = None,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword arguments:
:paramtype arguments: list[str]
"""
super(DockerConfiguration, self).__init__(**kwargs)
self.use_docker = use_docker
self.shared_volumes = shared_volumes
self.arguments = arguments
class DockerImagePlatform(msrest.serialization.Model):
"""DockerImagePlatform.
:ivar os:
:vartype os: str
:ivar architecture:
:vartype architecture: str
"""
_attribute_map = {
'os': {'key': 'os', 'type': 'str'},
'architecture': {'key': 'architecture', 'type': 'str'},
}
def __init__(
self,
*,
os: Optional[str] = None,
architecture: Optional[str] = None,
**kwargs
):
"""
:keyword os:
:paramtype os: str
:keyword architecture:
:paramtype architecture: str
"""
super(DockerImagePlatform, self).__init__(**kwargs)
self.os = os
self.architecture = architecture
class DockerSection(msrest.serialization.Model):
"""DockerSection.
:ivar base_image:
:vartype base_image: str
:ivar platform:
:vartype platform: ~flow.models.DockerImagePlatform
:ivar base_dockerfile:
:vartype base_dockerfile: str
:ivar build_context:
:vartype build_context: ~flow.models.DockerBuildContext
:ivar base_image_registry:
:vartype base_image_registry: ~flow.models.ContainerRegistry
"""
_attribute_map = {
'base_image': {'key': 'baseImage', 'type': 'str'},
'platform': {'key': 'platform', 'type': 'DockerImagePlatform'},
'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
'build_context': {'key': 'buildContext', 'type': 'DockerBuildContext'},
'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
}
def __init__(
self,
*,
base_image: Optional[str] = None,
platform: Optional["DockerImagePlatform"] = None,
base_dockerfile: Optional[str] = None,
build_context: Optional["DockerBuildContext"] = None,
base_image_registry: Optional["ContainerRegistry"] = None,
**kwargs
):
"""
:keyword base_image:
:paramtype base_image: str
:keyword platform:
:paramtype platform: ~flow.models.DockerImagePlatform
:keyword base_dockerfile:
:paramtype base_dockerfile: str
:keyword build_context:
:paramtype build_context: ~flow.models.DockerBuildContext
:keyword base_image_registry:
:paramtype base_image_registry: ~flow.models.ContainerRegistry
"""
super(DockerSection, self).__init__(**kwargs)
self.base_image = base_image
self.platform = platform
self.base_dockerfile = base_dockerfile
self.build_context = build_context
self.base_image_registry = base_image_registry
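# --- Editor's sketch (commented out): DockerSection composes the two helper models
# defined just above. The image name and repository URL are placeholders.
#
#     docker = DockerSection(
#         base_image="mcr.microsoft.com/azureml/base:latest",  # assumed image name
#         platform=DockerImagePlatform(os="Linux", architecture="amd64"),
#         build_context=DockerBuildContext(
#             location_type="Git",
#             location="https://example.com/repo.git",
#             # dockerfile_path defaults to "Dockerfile" (see DockerBuildContext)
#         ),
#     )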
class DockerSettingConfiguration(msrest.serialization.Model):
"""DockerSettingConfiguration.
:ivar use_docker:
:vartype use_docker: bool
:ivar shared_volumes:
:vartype shared_volumes: bool
:ivar shm_size:
:vartype shm_size: str
:ivar arguments:
:vartype arguments: list[str]
"""
_attribute_map = {
'use_docker': {'key': 'useDocker', 'type': 'bool'},
'shared_volumes': {'key': 'sharedVolumes', 'type': 'bool'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
}
def __init__(
self,
*,
use_docker: Optional[bool] = None,
shared_volumes: Optional[bool] = None,
shm_size: Optional[str] = None,
arguments: Optional[List[str]] = None,
**kwargs
):
"""
:keyword use_docker:
:paramtype use_docker: bool
:keyword shared_volumes:
:paramtype shared_volumes: bool
:keyword shm_size:
:paramtype shm_size: str
:keyword arguments:
:paramtype arguments: list[str]
"""
super(DockerSettingConfiguration, self).__init__(**kwargs)
self.use_docker = use_docker
self.shared_volumes = shared_volumes
self.shm_size = shm_size
self.arguments = arguments
class DoWhileControlFlowInfo(msrest.serialization.Model):
"""DoWhileControlFlowInfo.
:ivar output_port_name_to_input_port_names_mapping: Dictionary mapping each output port
 name to the list of input port names it feeds on the next loop iteration.
:vartype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:ivar condition_output_port_name:
:vartype condition_output_port_name: str
:ivar run_settings:
:vartype run_settings: ~flow.models.DoWhileControlFlowRunSettings
"""
_attribute_map = {
'output_port_name_to_input_port_names_mapping': {'key': 'outputPortNameToInputPortNamesMapping', 'type': '{[str]}'},
'condition_output_port_name': {'key': 'conditionOutputPortName', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': 'DoWhileControlFlowRunSettings'},
}
def __init__(
self,
*,
output_port_name_to_input_port_names_mapping: Optional[Dict[str, List[str]]] = None,
condition_output_port_name: Optional[str] = None,
run_settings: Optional["DoWhileControlFlowRunSettings"] = None,
**kwargs
):
"""
:keyword output_port_name_to_input_port_names_mapping: Dictionary mapping each output port
 name to the list of input port names it feeds on the next loop iteration.
:paramtype output_port_name_to_input_port_names_mapping: dict[str, list[str]]
:keyword condition_output_port_name:
:paramtype condition_output_port_name: str
:keyword run_settings:
:paramtype run_settings: ~flow.models.DoWhileControlFlowRunSettings
"""
super(DoWhileControlFlowInfo, self).__init__(**kwargs)
self.output_port_name_to_input_port_names_mapping = output_port_name_to_input_port_names_mapping
self.condition_output_port_name = condition_output_port_name
self.run_settings = run_settings
class DoWhileControlFlowRunSettings(msrest.serialization.Model):
"""DoWhileControlFlowRunSettings.
:ivar max_loop_iteration_count:
:vartype max_loop_iteration_count: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'max_loop_iteration_count': {'key': 'maxLoopIterationCount', 'type': 'ParameterAssignment'},
}
def __init__(
self,
*,
max_loop_iteration_count: Optional["ParameterAssignment"] = None,
**kwargs
):
"""
:keyword max_loop_iteration_count:
:paramtype max_loop_iteration_count: ~flow.models.ParameterAssignment
"""
super(DoWhileControlFlowRunSettings, self).__init__(**kwargs)
self.max_loop_iteration_count = max_loop_iteration_count
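# --- Editor's sketch (commented out): a do-while loop that feeds output port
# "result" back into input port "input" and terminates on the "condition" port; the
# port names are placeholders. ``run_settings.max_loop_iteration_count`` takes a
# ParameterAssignment (defined elsewhere in this module) and is omitted here.
#
#     loop_info = DoWhileControlFlowInfo(
#         output_port_name_to_input_port_names_mapping={"result": ["input"]},
#         condition_output_port_name="condition",
#     )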
class DownloadResourceInfo(msrest.serialization.Model):
"""DownloadResourceInfo.
:ivar download_url:
:vartype download_url: str
:ivar size:
:vartype size: long
"""
_attribute_map = {
'download_url': {'key': 'downloadUrl', 'type': 'str'},
'size': {'key': 'size', 'type': 'long'},
}
def __init__(
self,
*,
download_url: Optional[str] = None,
size: Optional[int] = None,
**kwargs
):
"""
:keyword download_url:
:paramtype download_url: str
:keyword size:
:paramtype size: long
"""
super(DownloadResourceInfo, self).__init__(**kwargs)
self.download_url = download_url
self.size = size
class EndpointSetting(msrest.serialization.Model):
"""EndpointSetting.
:ivar type:
:vartype type: str
:ivar port:
:vartype port: int
:ivar ssl_thumbprint:
:vartype ssl_thumbprint: str
:ivar endpoint:
:vartype endpoint: str
:ivar proxy_endpoint:
:vartype proxy_endpoint: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar enabled:
:vartype enabled: bool
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'ssl_thumbprint': {'key': 'sslThumbprint', 'type': 'str'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'proxy_endpoint': {'key': 'proxyEndpoint', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[str] = None,
port: Optional[int] = None,
ssl_thumbprint: Optional[str] = None,
endpoint: Optional[str] = None,
proxy_endpoint: Optional[str] = None,
status: Optional[str] = None,
error_message: Optional[str] = None,
enabled: Optional[bool] = None,
properties: Optional[Dict[str, str]] = None,
nodes: Optional[str] = None,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword port:
:paramtype port: int
:keyword ssl_thumbprint:
:paramtype ssl_thumbprint: str
:keyword endpoint:
:paramtype endpoint: str
:keyword proxy_endpoint:
:paramtype proxy_endpoint: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword enabled:
:paramtype enabled: bool
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: str
"""
super(EndpointSetting, self).__init__(**kwargs)
self.type = type
self.port = port
self.ssl_thumbprint = ssl_thumbprint
self.endpoint = endpoint
self.proxy_endpoint = proxy_endpoint
self.status = status
self.error_message = error_message
self.enabled = enabled
self.properties = properties
self.nodes = nodes
class EntityInterface(msrest.serialization.Model):
"""EntityInterface.
:ivar parameters:
:vartype parameters: list[~flow.models.Parameter]
:ivar ports:
:vartype ports: ~flow.models.NodePortInterface
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.Parameter]
:ivar data_path_parameters:
:vartype data_path_parameters: list[~flow.models.DataPathParameter]
:ivar data_path_parameter_list:
:vartype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
:ivar asset_output_settings_parameter_list:
:vartype asset_output_settings_parameter_list: list[~flow.models.AssetOutputSettingsParameter]
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '[Parameter]'},
'ports': {'key': 'ports', 'type': 'NodePortInterface'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[Parameter]'},
'data_path_parameters': {'key': 'dataPathParameters', 'type': '[DataPathParameter]'},
'data_path_parameter_list': {'key': 'dataPathParameterList', 'type': '[DataSetPathParameter]'},
'asset_output_settings_parameter_list': {'key': 'AssetOutputSettingsParameterList', 'type': '[AssetOutputSettingsParameter]'},
}
def __init__(
self,
*,
parameters: Optional[List["Parameter"]] = None,
ports: Optional["NodePortInterface"] = None,
metadata_parameters: Optional[List["Parameter"]] = None,
data_path_parameters: Optional[List["DataPathParameter"]] = None,
data_path_parameter_list: Optional[List["DataSetPathParameter"]] = None,
asset_output_settings_parameter_list: Optional[List["AssetOutputSettingsParameter"]] = None,
**kwargs
):
"""
:keyword parameters:
:paramtype parameters: list[~flow.models.Parameter]
:keyword ports:
:paramtype ports: ~flow.models.NodePortInterface
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.Parameter]
:keyword data_path_parameters:
:paramtype data_path_parameters: list[~flow.models.DataPathParameter]
:keyword data_path_parameter_list:
:paramtype data_path_parameter_list: list[~flow.models.DataSetPathParameter]
:keyword asset_output_settings_parameter_list:
:paramtype asset_output_settings_parameter_list:
list[~flow.models.AssetOutputSettingsParameter]
"""
super(EntityInterface, self).__init__(**kwargs)
self.parameters = parameters
self.ports = ports
self.metadata_parameters = metadata_parameters
self.data_path_parameters = data_path_parameters
self.data_path_parameter_list = data_path_parameter_list
self.asset_output_settings_parameter_list = asset_output_settings_parameter_list
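# --- Editor's sketch (commented out): attaching a dataset path parameter (see
# DataSetPathParameter above) to an interface; the Parameter and NodePortInterface
# models are defined elsewhere in this module and omitted here.
#
#     interface = EntityInterface(
#         data_path_parameter_list=[
#             DataSetPathParameter(name="input_path", is_optional=True),
#         ],
#     )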
class EntrySetting(msrest.serialization.Model):
"""EntrySetting.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
*,
file: Optional[str] = None,
class_name: Optional[str] = None,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(EntrySetting, self).__init__(**kwargs)
self.file = file
self.class_name = class_name
class EnumParameterRule(msrest.serialization.Model):
"""EnumParameterRule.
:ivar valid_values:
:vartype valid_values: list[str]
"""
_attribute_map = {
'valid_values': {'key': 'validValues', 'type': '[str]'},
}
def __init__(
self,
*,
valid_values: Optional[List[str]] = None,
**kwargs
):
"""
:keyword valid_values:
:paramtype valid_values: list[str]
"""
super(EnumParameterRule, self).__init__(**kwargs)
self.valid_values = valid_values
class EnvironmentConfiguration(msrest.serialization.Model):
"""EnvironmentConfiguration.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar use_environment_definition:
:vartype use_environment_definition: bool
:ivar environment_definition_string:
:vartype environment_definition_string: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'use_environment_definition': {'key': 'useEnvironmentDefinition', 'type': 'bool'},
'environment_definition_string': {'key': 'environmentDefinitionString', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
version: Optional[str] = None,
use_environment_definition: Optional[bool] = None,
environment_definition_string: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword use_environment_definition:
:paramtype use_environment_definition: bool
:keyword environment_definition_string:
:paramtype environment_definition_string: str
"""
super(EnvironmentConfiguration, self).__init__(**kwargs)
self.name = name
self.version = version
self.use_environment_definition = use_environment_definition
self.environment_definition_string = environment_definition_string
class EnvironmentDefinition(msrest.serialization.Model):
"""EnvironmentDefinition.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar asset_id:
:vartype asset_id: str
:ivar auto_rebuild:
:vartype auto_rebuild: bool
:ivar python:
:vartype python: ~flow.models.PythonSection
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar docker:
:vartype docker: ~flow.models.DockerSection
:ivar spark:
:vartype spark: ~flow.models.SparkSection
:ivar r:
:vartype r: ~flow.models.RSection
:ivar inferencing_stack_version:
:vartype inferencing_stack_version: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'auto_rebuild': {'key': 'autoRebuild', 'type': 'bool'},
'python': {'key': 'python', 'type': 'PythonSection'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'docker': {'key': 'docker', 'type': 'DockerSection'},
'spark': {'key': 'spark', 'type': 'SparkSection'},
'r': {'key': 'r', 'type': 'RSection'},
'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
version: Optional[str] = None,
asset_id: Optional[str] = None,
auto_rebuild: Optional[bool] = None,
python: Optional["PythonSection"] = None,
environment_variables: Optional[Dict[str, str]] = None,
docker: Optional["DockerSection"] = None,
spark: Optional["SparkSection"] = None,
r: Optional["RSection"] = None,
inferencing_stack_version: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword asset_id:
:paramtype asset_id: str
:keyword auto_rebuild:
:paramtype auto_rebuild: bool
:keyword python:
:paramtype python: ~flow.models.PythonSection
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword docker:
:paramtype docker: ~flow.models.DockerSection
:keyword spark:
:paramtype spark: ~flow.models.SparkSection
:keyword r:
:paramtype r: ~flow.models.RSection
:keyword inferencing_stack_version:
:paramtype inferencing_stack_version: str
"""
super(EnvironmentDefinition, self).__init__(**kwargs)
self.name = name
self.version = version
self.asset_id = asset_id
self.auto_rebuild = auto_rebuild
self.python = python
self.environment_variables = environment_variables
self.docker = docker
self.spark = spark
self.r = r
self.inferencing_stack_version = inferencing_stack_version
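# Illustrative sketch, not part of the generated client: a minimal
# ``EnvironmentDefinition`` using only plain-typed fields. The nested
# ``python``/``docker``/``spark``/``r`` sections take ``PythonSection``,
# ``DockerSection``, ``SparkSection`` and ``RSection`` instances (defined
# elsewhere in this module); left as ``None`` they are simply omitted from
# the serialized payload. All literal values are hypothetical.
#
#     env = EnvironmentDefinition(
#         name='flow-runtime-env',
#         version='2',
#         auto_rebuild=True,
#         environment_variables={'PF_WORKER_COUNT': '4'},
#     )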
class EnvironmentDefinitionDto(msrest.serialization.Model):
"""EnvironmentDefinitionDto.
:ivar environment_name:
:vartype environment_name: str
:ivar environment_version:
:vartype environment_version: str
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'environment_name': {'key': 'environmentName', 'type': 'str'},
'environment_version': {'key': 'environmentVersion', 'type': 'str'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
*,
environment_name: Optional[str] = None,
environment_version: Optional[str] = None,
intellectual_property_publisher: Optional[str] = None,
**kwargs
):
"""
:keyword environment_name:
:paramtype environment_name: str
:keyword environment_version:
:paramtype environment_version: str
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(EnvironmentDefinitionDto, self).__init__(**kwargs)
self.environment_name = environment_name
self.environment_version = environment_version
self.intellectual_property_publisher = intellectual_property_publisher
class EPRPipelineRunErrorClassificationRequest(msrest.serialization.Model):
"""EPRPipelineRunErrorClassificationRequest.
:ivar root_run_id:
:vartype root_run_id: str
:ivar run_id:
:vartype run_id: str
:ivar task_result:
:vartype task_result: str
:ivar failure_type:
:vartype failure_type: str
:ivar failure_name:
:vartype failure_name: str
:ivar responsible_team:
:vartype responsible_team: str
"""
_attribute_map = {
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'task_result': {'key': 'taskResult', 'type': 'str'},
'failure_type': {'key': 'failureType', 'type': 'str'},
'failure_name': {'key': 'failureName', 'type': 'str'},
'responsible_team': {'key': 'responsibleTeam', 'type': 'str'},
}
def __init__(
self,
*,
root_run_id: Optional[str] = None,
run_id: Optional[str] = None,
task_result: Optional[str] = None,
failure_type: Optional[str] = None,
failure_name: Optional[str] = None,
responsible_team: Optional[str] = None,
**kwargs
):
"""
:keyword root_run_id:
:paramtype root_run_id: str
:keyword run_id:
:paramtype run_id: str
:keyword task_result:
:paramtype task_result: str
:keyword failure_type:
:paramtype failure_type: str
:keyword failure_name:
:paramtype failure_name: str
:keyword responsible_team:
:paramtype responsible_team: str
"""
super(EPRPipelineRunErrorClassificationRequest, self).__init__(**kwargs)
self.root_run_id = root_run_id
self.run_id = run_id
self.task_result = task_result
self.failure_type = failure_type
self.failure_name = failure_name
self.responsible_team = responsible_team
class ErrorAdditionalInfo(msrest.serialization.Model):
"""The resource management error additional info.
:ivar type: The additional info type.
:vartype type: str
:ivar info: The additional info.
:vartype info: any
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'object'},
}
def __init__(
self,
*,
type: Optional[str] = None,
info: Optional[Any] = None,
**kwargs
):
"""
:keyword type: The additional info type.
:paramtype type: str
:keyword info: The additional info.
:paramtype info: any
"""
super(ErrorAdditionalInfo, self).__init__(**kwargs)
self.type = type
self.info = info
class ErrorResponse(msrest.serialization.Model):
"""The error response.
:ivar error: The root error.
:vartype error: ~flow.models.RootError
:ivar correlation: Dictionary containing correlation details for the error.
:vartype correlation: dict[str, str]
:ivar environment: The hosting environment.
:vartype environment: str
:ivar location: The Azure region.
:vartype location: str
:ivar time: The time in UTC.
:vartype time: ~datetime.datetime
:ivar component_name: Name of the component where the error originated or was encountered.
:vartype component_name: str
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'RootError'},
'correlation': {'key': 'correlation', 'type': '{str}'},
'environment': {'key': 'environment', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'component_name': {'key': 'componentName', 'type': 'str'},
}
def __init__(
self,
*,
error: Optional["RootError"] = None,
correlation: Optional[Dict[str, str]] = None,
environment: Optional[str] = None,
location: Optional[str] = None,
time: Optional[datetime.datetime] = None,
component_name: Optional[str] = None,
**kwargs
):
"""
:keyword error: The root error.
:paramtype error: ~flow.models.RootError
:keyword correlation: Dictionary containing correlation details for the error.
:paramtype correlation: dict[str, str]
:keyword environment: The hosting environment.
:paramtype environment: str
:keyword location: The Azure region.
:paramtype location: str
:keyword time: The time in UTC.
:paramtype time: ~datetime.datetime
:keyword component_name: Name of the component where the error originated or was encountered.
:paramtype component_name: str
"""
super(ErrorResponse, self).__init__(**kwargs)
self.error = error
self.correlation = correlation
self.environment = environment
self.location = location
self.time = time
self.component_name = component_name
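# Illustrative sketch, not part of the generated client: error payloads from
# the service deserialize into ``ErrorResponse`` using the wire keys in
# ``_attribute_map`` above (camelCase 'componentName', ISO-8601 'time'). The
# payload values are hypothetical, and the nested 'error' key is omitted so
# this sketch does not have to assume the shape of ``RootError``.
#
#     from msrest.serialization import Deserializer
#     payload = {
#         'environment': 'master',
#         'location': 'eastus',
#         'time': '2024-01-01T00:00:00Z',
#         'componentName': 'flow-service',
#     }
#     err = Deserializer({'ErrorResponse': ErrorResponse})('ErrorResponse', payload)
#     assert err.component_name == 'flow-service'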
class EsCloudConfiguration(msrest.serialization.Model):
"""EsCloudConfiguration.
:ivar enable_output_to_file_based_on_data_type_id:
:vartype enable_output_to_file_based_on_data_type_id: bool
:ivar environment:
:vartype environment: ~flow.models.EnvironmentConfiguration
:ivar hyper_drive_configuration:
:vartype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
:ivar k8_s_config:
:vartype k8_s_config: ~flow.models.K8SConfiguration
:ivar resource_config:
:vartype resource_config: ~flow.models.AEVAResourceConfiguration
:ivar torch_distributed_config:
:vartype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
:ivar target_selector_config:
:vartype target_selector_config: ~flow.models.TargetSelectorConfiguration
:ivar docker_config:
:vartype docker_config: ~flow.models.DockerSettingConfiguration
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: int
:ivar identity:
:vartype identity: ~flow.models.IdentitySetting
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar run_config:
:vartype run_config: str
"""
_attribute_map = {
'enable_output_to_file_based_on_data_type_id': {'key': 'enableOutputToFileBasedOnDataTypeId', 'type': 'bool'},
'environment': {'key': 'environment', 'type': 'EnvironmentConfiguration'},
'hyper_drive_configuration': {'key': 'hyperDriveConfiguration', 'type': 'HyperDriveConfiguration'},
'k8_s_config': {'key': 'k8sConfig', 'type': 'K8SConfiguration'},
'resource_config': {'key': 'resourceConfig', 'type': 'AEVAResourceConfiguration'},
'torch_distributed_config': {'key': 'torchDistributedConfig', 'type': 'TorchDistributedConfiguration'},
'target_selector_config': {'key': 'targetSelectorConfig', 'type': 'TargetSelectorConfiguration'},
'docker_config': {'key': 'dockerConfig', 'type': 'DockerSettingConfiguration'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'int'},
'identity': {'key': 'identity', 'type': 'IdentitySetting'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'run_config': {'key': 'runConfig', 'type': 'str'},
}
def __init__(
self,
*,
enable_output_to_file_based_on_data_type_id: Optional[bool] = None,
environment: Optional["EnvironmentConfiguration"] = None,
hyper_drive_configuration: Optional["HyperDriveConfiguration"] = None,
k8_s_config: Optional["K8SConfiguration"] = None,
resource_config: Optional["AEVAResourceConfiguration"] = None,
torch_distributed_config: Optional["TorchDistributedConfiguration"] = None,
target_selector_config: Optional["TargetSelectorConfiguration"] = None,
docker_config: Optional["DockerSettingConfiguration"] = None,
environment_variables: Optional[Dict[str, str]] = None,
max_run_duration_seconds: Optional[int] = None,
identity: Optional["IdentitySetting"] = None,
application_endpoints: Optional[Dict[str, "ApplicationEndpointConfiguration"]] = None,
run_config: Optional[str] = None,
**kwargs
):
"""
:keyword enable_output_to_file_based_on_data_type_id:
:paramtype enable_output_to_file_based_on_data_type_id: bool
:keyword environment:
:paramtype environment: ~flow.models.EnvironmentConfiguration
:keyword hyper_drive_configuration:
:paramtype hyper_drive_configuration: ~flow.models.HyperDriveConfiguration
:keyword k8_s_config:
:paramtype k8_s_config: ~flow.models.K8SConfiguration
:keyword resource_config:
:paramtype resource_config: ~flow.models.AEVAResourceConfiguration
:keyword torch_distributed_config:
:paramtype torch_distributed_config: ~flow.models.TorchDistributedConfiguration
:keyword target_selector_config:
:paramtype target_selector_config: ~flow.models.TargetSelectorConfiguration
:keyword docker_config:
:paramtype docker_config: ~flow.models.DockerSettingConfiguration
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: int
:keyword identity:
:paramtype identity: ~flow.models.IdentitySetting
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword run_config:
:paramtype run_config: str
"""
super(EsCloudConfiguration, self).__init__(**kwargs)
self.enable_output_to_file_based_on_data_type_id = enable_output_to_file_based_on_data_type_id
self.environment = environment
self.hyper_drive_configuration = hyper_drive_configuration
self.k8_s_config = k8_s_config
self.resource_config = resource_config
self.torch_distributed_config = torch_distributed_config
self.target_selector_config = target_selector_config
self.docker_config = docker_config
self.environment_variables = environment_variables
self.max_run_duration_seconds = max_run_duration_seconds
self.identity = identity
self.application_endpoints = application_endpoints
self.run_config = run_config
class EvaluationFlowRunSettings(msrest.serialization.Model):
"""EvaluationFlowRunSettings.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar connection_overrides:
:vartype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'connection_overrides': {'key': 'connectionOverrides', 'type': '[ConnectionOverrideSetting]'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
flow_run_id: Optional[str] = None,
flow_run_display_name: Optional[str] = None,
batch_data_input: Optional["BatchDataInput"] = None,
inputs_mapping: Optional[Dict[str, str]] = None,
data_inputs: Optional[Dict[str, str]] = None,
connection_overrides: Optional[List["ConnectionOverrideSetting"]] = None,
runtime_name: Optional[str] = None,
aml_compute_name: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword connection_overrides:
:paramtype connection_overrides: list[~flow.models.ConnectionOverrideSetting]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(EvaluationFlowRunSettings, self).__init__(**kwargs)
self.flow_run_id = flow_run_id
self.flow_run_display_name = flow_run_display_name
self.batch_data_input = batch_data_input
self.inputs_mapping = inputs_mapping
self.data_inputs = data_inputs
self.connection_overrides = connection_overrides
self.runtime_name = runtime_name
self.aml_compute_name = aml_compute_name
self.properties = properties
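# Illustrative sketch, not part of the generated client: an
# ``EvaluationFlowRunSettings`` that maps a batch-data column onto a flow
# input. The ``${data.<column>}`` mapping syntax and every literal value are
# assumptions for illustration; the model itself treats ``inputs_mapping``
# as an opaque ``dict[str, str]``.
#
#     settings = EvaluationFlowRunSettings(
#         flow_run_display_name='eval-accuracy',
#         inputs_mapping={'groundtruth': '${data.answer}'},
#         runtime_name='my-runtime',
#     )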
class ExampleRequest(msrest.serialization.Model):
"""ExampleRequest.
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, list[list[any]]]
:ivar global_parameters: This is a dictionary.
:vartype global_parameters: dict[str, any]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{[[object]]}'},
'global_parameters': {'key': 'globalParameters', 'type': '{object}'},
}
def __init__(
self,
*,
inputs: Optional[Dict[str, List[List[Any]]]] = None,
global_parameters: Optional[Dict[str, Any]] = None,
**kwargs
):
"""
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, list[list[any]]]
:keyword global_parameters: This is a dictionary.
:paramtype global_parameters: dict[str, any]
"""
super(ExampleRequest, self).__init__(**kwargs)
self.inputs = inputs
self.global_parameters = global_parameters
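# Illustrative sketch, not part of the generated client: the nested ``inputs``
# type (``dict[str, list[list[any]]]``) reads naturally as one table per key,
# a list of rows where each row is a list of cell values. The input name,
# rows, and parameter below are hypothetical.
#
#     request = ExampleRequest(
#         inputs={'dataset1': [['first question', 1], ['second question', 2]]},
#         global_parameters={'temperature': 0.2},
#     )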
class ExecutionContextDto(msrest.serialization.Model):
"""ExecutionContextDto.
:ivar executable:
:vartype executable: str
:ivar user_code:
:vartype user_code: str
:ivar arguments:
:vartype arguments: str
"""
_attribute_map = {
'executable': {'key': 'executable', 'type': 'str'},
'user_code': {'key': 'userCode', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': 'str'},
}
def __init__(
self,
*,
executable: Optional[str] = None,
user_code: Optional[str] = None,
arguments: Optional[str] = None,
**kwargs
):
"""
:keyword executable:
:paramtype executable: str
:keyword user_code:
:paramtype user_code: str
:keyword arguments:
:paramtype arguments: str
"""
super(ExecutionContextDto, self).__init__(**kwargs)
self.executable = executable
self.user_code = user_code
self.arguments = arguments
class ExecutionDataLocation(msrest.serialization.Model):
"""ExecutionDataLocation.
:ivar dataset:
:vartype dataset: ~flow.models.RunDatasetReference
:ivar data_path:
:vartype data_path: ~flow.models.ExecutionDataPath
:ivar uri:
:vartype uri: ~flow.models.UriReference
:ivar type:
:vartype type: str
"""
_attribute_map = {
'dataset': {'key': 'dataset', 'type': 'RunDatasetReference'},
'data_path': {'key': 'dataPath', 'type': 'ExecutionDataPath'},
'uri': {'key': 'uri', 'type': 'UriReference'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
dataset: Optional["RunDatasetReference"] = None,
data_path: Optional["ExecutionDataPath"] = None,
uri: Optional["UriReference"] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword dataset:
:paramtype dataset: ~flow.models.RunDatasetReference
:keyword data_path:
:paramtype data_path: ~flow.models.ExecutionDataPath
:keyword uri:
:paramtype uri: ~flow.models.UriReference
:keyword type:
:paramtype type: str
"""
super(ExecutionDataLocation, self).__init__(**kwargs)
self.dataset = dataset
self.data_path = data_path
self.uri = uri
self.type = type
class ExecutionDataPath(msrest.serialization.Model):
"""ExecutionDataPath.
:ivar datastore_name:
:vartype datastore_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'datastore_name': {'key': 'datastoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
datastore_name: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword datastore_name:
:paramtype datastore_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(ExecutionDataPath, self).__init__(**kwargs)
self.datastore_name = datastore_name
self.relative_path = relative_path
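# Illustrative sketch, not part of the generated client: composing the two
# models above, a datastore-relative path wrapped in ``ExecutionDataPath``
# and attached to ``ExecutionDataLocation``. The datastore name, path, and
# ``type`` discriminator string are hypothetical.
#
#     location = ExecutionDataLocation(
#         data_path=ExecutionDataPath(
#             datastore_name='workspaceblobstore',
#             relative_path='flows/outputs/run1',
#         ),
#         type='DataPath',
#     )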
class ExecutionGlobsOptions(msrest.serialization.Model):
"""ExecutionGlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
*,
glob_patterns: Optional[List[str]] = None,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(ExecutionGlobsOptions, self).__init__(**kwargs)
self.glob_patterns = glob_patterns
class ExperimentComputeMetaInfo(msrest.serialization.Model):
"""ExperimentComputeMetaInfo.
:ivar current_node_count:
:vartype current_node_count: int
:ivar target_node_count:
:vartype target_node_count: int
:ivar max_node_count:
:vartype max_node_count: int
:ivar min_node_count:
:vartype min_node_count: int
:ivar idle_node_count:
:vartype idle_node_count: int
:ivar running_node_count:
:vartype running_node_count: int
:ivar preparing_node_count:
:vartype preparing_node_count: int
:ivar unusable_node_count:
:vartype unusable_node_count: int
:ivar leaving_node_count:
:vartype leaving_node_count: int
:ivar preempted_node_count:
:vartype preempted_node_count: int
:ivar vm_size:
:vartype vm_size: str
:ivar location:
:vartype location: str
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar state:
:vartype state: str
:ivar os_type:
:vartype os_type: str
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
*,
current_node_count: Optional[int] = None,
target_node_count: Optional[int] = None,
max_node_count: Optional[int] = None,
min_node_count: Optional[int] = None,
idle_node_count: Optional[int] = None,
running_node_count: Optional[int] = None,
preparing_node_count: Optional[int] = None,
unusable_node_count: Optional[int] = None,
leaving_node_count: Optional[int] = None,
preempted_node_count: Optional[int] = None,
vm_size: Optional[str] = None,
location: Optional[str] = None,
provisioning_state: Optional[str] = None,
state: Optional[str] = None,
os_type: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
created_by_studio: Optional[bool] = None,
is_gpu_type: Optional[bool] = None,
resource_id: Optional[str] = None,
compute_type: Optional[str] = None,
**kwargs
):
"""
:keyword current_node_count:
:paramtype current_node_count: int
:keyword target_node_count:
:paramtype target_node_count: int
:keyword max_node_count:
:paramtype max_node_count: int
:keyword min_node_count:
:paramtype min_node_count: int
:keyword idle_node_count:
:paramtype idle_node_count: int
:keyword running_node_count:
:paramtype running_node_count: int
:keyword preparing_node_count:
:paramtype preparing_node_count: int
:keyword unusable_node_count:
:paramtype unusable_node_count: int
:keyword leaving_node_count:
:paramtype leaving_node_count: int
:keyword preempted_node_count:
:paramtype preempted_node_count: int
:keyword vm_size:
:paramtype vm_size: str
:keyword location:
:paramtype location: str
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword state:
:paramtype state: str
:keyword os_type:
:paramtype os_type: str
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(ExperimentComputeMetaInfo, self).__init__(**kwargs)
self.current_node_count = current_node_count
self.target_node_count = target_node_count
self.max_node_count = max_node_count
self.min_node_count = min_node_count
self.idle_node_count = idle_node_count
self.running_node_count = running_node_count
self.preparing_node_count = preparing_node_count
self.unusable_node_count = unusable_node_count
self.leaving_node_count = leaving_node_count
self.preempted_node_count = preempted_node_count
self.vm_size = vm_size
self.location = location
self.provisioning_state = provisioning_state
self.state = state
self.os_type = os_type
self.id = id
self.name = name
self.created_by_studio = created_by_studio
self.is_gpu_type = is_gpu_type
self.resource_id = resource_id
self.compute_type = compute_type
class ExperimentInfo(msrest.serialization.Model):
"""ExperimentInfo.
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
*,
experiment_name: Optional[str] = None,
experiment_id: Optional[str] = None,
**kwargs
):
"""
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(ExperimentInfo, self).__init__(**kwargs)
self.experiment_name = experiment_name
self.experiment_id = experiment_id
class ExportComponentMetaInfo(msrest.serialization.Model):
"""ExportComponentMetaInfo.
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar module_version:
:vartype module_version: str
:ivar is_anonymous:
:vartype is_anonymous: bool
"""
_attribute_map = {
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
}
def __init__(
self,
*,
module_entity: Optional["ModuleEntity"] = None,
module_version: Optional[str] = None,
is_anonymous: Optional[bool] = None,
**kwargs
):
"""
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword module_version:
:paramtype module_version: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
"""
super(ExportComponentMetaInfo, self).__init__(**kwargs)
self.module_entity = module_entity
self.module_version = module_version
self.is_anonymous = is_anonymous
class ExportDataTask(msrest.serialization.Model):
"""ExportDataTask.
:ivar data_transfer_sink:
:vartype data_transfer_sink: ~flow.models.DataTransferSink
"""
_attribute_map = {
'data_transfer_sink': {'key': 'DataTransferSink', 'type': 'DataTransferSink'},
}
def __init__(
self,
*,
data_transfer_sink: Optional["DataTransferSink"] = None,
**kwargs
):
"""
:keyword data_transfer_sink:
:paramtype data_transfer_sink: ~flow.models.DataTransferSink
"""
super(ExportDataTask, self).__init__(**kwargs)
self.data_transfer_sink = data_transfer_sink
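# Illustrative note, not part of the generated client: unlike most keys in
# this module, ``ExportDataTask`` serializes its field under the PascalCase
# wire key 'DataTransferSink' (see ``_attribute_map`` above), so hand-built
# payloads must use that exact casing. With the field unset, msrest omits
# ``None`` attributes entirely:
#
#     task = ExportDataTask()          # data_transfer_sink defaults to None
#     # serializing ``task`` yields {}; when a sink is present it appears
#     # under 'DataTransferSink', not 'dataTransferSink'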
class FeaturizationSettings(msrest.serialization.Model):
"""FeaturizationSettings.
:ivar mode: Possible values include: "Auto", "Custom", "Off".
:vartype mode: str or ~flow.models.FeaturizationMode
:ivar blocked_transformers:
:vartype blocked_transformers: list[str]
:ivar column_purposes: Dictionary of :code:`<string>`.
:vartype column_purposes: dict[str, str]
:ivar drop_columns:
:vartype drop_columns: list[str]
:ivar transformer_params: Dictionary of :code:`<ColumnTransformer>` lists.
:vartype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
:ivar dataset_language:
:vartype dataset_language: str
:ivar enable_dnn_featurization:
:vartype enable_dnn_featurization: bool
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'},
'column_purposes': {'key': 'columnPurposes', 'type': '{str}'},
'drop_columns': {'key': 'dropColumns', 'type': '[str]'},
'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'},
'dataset_language': {'key': 'datasetLanguage', 'type': 'str'},
'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "FeaturizationMode"]] = None,
blocked_transformers: Optional[List[str]] = None,
column_purposes: Optional[Dict[str, str]] = None,
drop_columns: Optional[List[str]] = None,
transformer_params: Optional[Dict[str, List["ColumnTransformer"]]] = None,
dataset_language: Optional[str] = None,
enable_dnn_featurization: Optional[bool] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom", "Off".
:paramtype mode: str or ~flow.models.FeaturizationMode
:keyword blocked_transformers:
:paramtype blocked_transformers: list[str]
:keyword column_purposes: Dictionary of :code:`<string>`.
:paramtype column_purposes: dict[str, str]
:keyword drop_columns:
:paramtype drop_columns: list[str]
:keyword transformer_params: Dictionary of :code:`<ColumnTransformer>` lists.
:paramtype transformer_params: dict[str, list[~flow.models.ColumnTransformer]]
:keyword dataset_language:
:paramtype dataset_language: str
:keyword enable_dnn_featurization:
:paramtype enable_dnn_featurization: bool
"""
super(FeaturizationSettings, self).__init__(**kwargs)
self.mode = mode
self.blocked_transformers = blocked_transformers
self.column_purposes = column_purposes
self.drop_columns = drop_columns
self.transformer_params = transformer_params
self.dataset_language = dataset_language
self.enable_dnn_featurization = enable_dnn_featurization
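# Illustrative sketch, not part of the generated client: a custom
# featurization config using only plain-typed fields. ``mode`` accepts either
# a ``FeaturizationMode`` enum member or its string value; the column and
# transformer names are hypothetical, and ``transformer_params`` is left
# unset so this sketch does not have to assume ``ColumnTransformer``'s
# signature.
#
#     featurization = FeaturizationSettings(
#         mode='Custom',
#         blocked_transformers=['TfIdf'],
#         column_purposes={'review_text': 'Text'},
#         drop_columns=['row_id'],
#     )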
class FeedDto(msrest.serialization.Model):
"""FeedDto.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar sharing_scopes:
:vartype sharing_scopes: list[~flow.models.SharingScope]
:ivar supported_asset_types:
:vartype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
:ivar regional_workspace_storage: This is a dictionary.
:vartype regional_workspace_storage: dict[str, list[str]]
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'sharing_scopes': {'key': 'sharingScopes', 'type': '[SharingScope]'},
'supported_asset_types': {'key': 'supportedAssetTypes', 'type': 'FeedDtoSupportedAssetTypes'},
'regional_workspace_storage': {'key': 'regionalWorkspaceStorage', 'type': '{[str]}'},
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
description: Optional[str] = None,
sharing_scopes: Optional[List["SharingScope"]] = None,
supported_asset_types: Optional["FeedDtoSupportedAssetTypes"] = None,
regional_workspace_storage: Optional[Dict[str, List[str]]] = None,
intellectual_property_publisher: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword sharing_scopes:
:paramtype sharing_scopes: list[~flow.models.SharingScope]
:keyword supported_asset_types:
:paramtype supported_asset_types: ~flow.models.FeedDtoSupportedAssetTypes
:keyword regional_workspace_storage: This is a dictionary.
:paramtype regional_workspace_storage: dict[str, list[str]]
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(FeedDto, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
self.description = description
self.sharing_scopes = sharing_scopes
self.supported_asset_types = supported_asset_types
self.regional_workspace_storage = regional_workspace_storage
self.intellectual_property_publisher = intellectual_property_publisher
class FeedDtoSupportedAssetTypes(msrest.serialization.Model):
"""FeedDtoSupportedAssetTypes.
:ivar component:
:vartype component: ~flow.models.AssetTypeMetaInfo
:ivar model:
:vartype model: ~flow.models.AssetTypeMetaInfo
:ivar environment:
:vartype environment: ~flow.models.AssetTypeMetaInfo
:ivar dataset:
:vartype dataset: ~flow.models.AssetTypeMetaInfo
:ivar data_store:
:vartype data_store: ~flow.models.AssetTypeMetaInfo
:ivar sample_graph:
:vartype sample_graph: ~flow.models.AssetTypeMetaInfo
:ivar flow_tool:
:vartype flow_tool: ~flow.models.AssetTypeMetaInfo
:ivar flow_tool_setting:
:vartype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
:ivar flow_connection:
:vartype flow_connection: ~flow.models.AssetTypeMetaInfo
:ivar flow_sample:
:vartype flow_sample: ~flow.models.AssetTypeMetaInfo
:ivar flow_runtime_spec:
:vartype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
"""
_attribute_map = {
'component': {'key': 'Component', 'type': 'AssetTypeMetaInfo'},
'model': {'key': 'Model', 'type': 'AssetTypeMetaInfo'},
'environment': {'key': 'Environment', 'type': 'AssetTypeMetaInfo'},
'dataset': {'key': 'Dataset', 'type': 'AssetTypeMetaInfo'},
'data_store': {'key': 'DataStore', 'type': 'AssetTypeMetaInfo'},
'sample_graph': {'key': 'SampleGraph', 'type': 'AssetTypeMetaInfo'},
'flow_tool': {'key': 'FlowTool', 'type': 'AssetTypeMetaInfo'},
'flow_tool_setting': {'key': 'FlowToolSetting', 'type': 'AssetTypeMetaInfo'},
'flow_connection': {'key': 'FlowConnection', 'type': 'AssetTypeMetaInfo'},
'flow_sample': {'key': 'FlowSample', 'type': 'AssetTypeMetaInfo'},
'flow_runtime_spec': {'key': 'FlowRuntimeSpec', 'type': 'AssetTypeMetaInfo'},
}
def __init__(
self,
*,
component: Optional["AssetTypeMetaInfo"] = None,
model: Optional["AssetTypeMetaInfo"] = None,
environment: Optional["AssetTypeMetaInfo"] = None,
dataset: Optional["AssetTypeMetaInfo"] = None,
data_store: Optional["AssetTypeMetaInfo"] = None,
sample_graph: Optional["AssetTypeMetaInfo"] = None,
flow_tool: Optional["AssetTypeMetaInfo"] = None,
flow_tool_setting: Optional["AssetTypeMetaInfo"] = None,
flow_connection: Optional["AssetTypeMetaInfo"] = None,
flow_sample: Optional["AssetTypeMetaInfo"] = None,
flow_runtime_spec: Optional["AssetTypeMetaInfo"] = None,
**kwargs
):
"""
:keyword component:
:paramtype component: ~flow.models.AssetTypeMetaInfo
:keyword model:
:paramtype model: ~flow.models.AssetTypeMetaInfo
:keyword environment:
:paramtype environment: ~flow.models.AssetTypeMetaInfo
:keyword dataset:
:paramtype dataset: ~flow.models.AssetTypeMetaInfo
:keyword data_store:
:paramtype data_store: ~flow.models.AssetTypeMetaInfo
:keyword sample_graph:
:paramtype sample_graph: ~flow.models.AssetTypeMetaInfo
:keyword flow_tool:
:paramtype flow_tool: ~flow.models.AssetTypeMetaInfo
:keyword flow_tool_setting:
:paramtype flow_tool_setting: ~flow.models.AssetTypeMetaInfo
:keyword flow_connection:
:paramtype flow_connection: ~flow.models.AssetTypeMetaInfo
:keyword flow_sample:
:paramtype flow_sample: ~flow.models.AssetTypeMetaInfo
:keyword flow_runtime_spec:
:paramtype flow_runtime_spec: ~flow.models.AssetTypeMetaInfo
"""
super(FeedDtoSupportedAssetTypes, self).__init__(**kwargs)
self.component = component
self.model = model
self.environment = environment
self.dataset = dataset
self.data_store = data_store
self.sample_graph = sample_graph
self.flow_tool = flow_tool
self.flow_tool_setting = flow_tool_setting
self.flow_connection = flow_connection
self.flow_sample = flow_sample
self.flow_runtime_spec = flow_runtime_spec
class FileSystem(msrest.serialization.Model):
"""FileSystem.
:ivar connection:
:vartype connection: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'connection': {'key': 'connection', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
*,
connection: Optional[str] = None,
path: Optional[str] = None,
**kwargs
):
"""
:keyword connection:
:paramtype connection: str
:keyword path:
:paramtype path: str
"""
super(FileSystem, self).__init__(**kwargs)
self.connection = connection
self.path = path
class Flow(msrest.serialization.Model):
"""Flow.
:ivar source_resource_id:
:vartype source_resource_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar node_variants: This is a dictionary.
:vartype node_variants: dict[str, ~flow.models.NodeVariant]
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar bulk_test_data: This is a dictionary.
:vartype bulk_test_data: dict[str, str]
:ivar evaluation_flows: This is a dictionary.
:vartype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
"""
_attribute_map = {
'source_resource_id': {'key': 'sourceResourceId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'node_variants': {'key': 'nodeVariants', 'type': '{NodeVariant}'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'bulk_test_data': {'key': 'bulkTestData', 'type': '{str}'},
'evaluation_flows': {'key': 'evaluationFlows', 'type': '{FlowGraphReference}'},
}
def __init__(
self,
*,
source_resource_id: Optional[str] = None,
flow_graph: Optional["FlowGraph"] = None,
node_variants: Optional[Dict[str, "NodeVariant"]] = None,
flow_graph_layout: Optional["FlowGraphLayout"] = None,
bulk_test_data: Optional[Dict[str, str]] = None,
evaluation_flows: Optional[Dict[str, "FlowGraphReference"]] = None,
**kwargs
):
"""
:keyword source_resource_id:
:paramtype source_resource_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword node_variants: This is a dictionary.
:paramtype node_variants: dict[str, ~flow.models.NodeVariant]
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword bulk_test_data: This is a dictionary.
:paramtype bulk_test_data: dict[str, str]
:keyword evaluation_flows: This is a dictionary.
:paramtype evaluation_flows: dict[str, ~flow.models.FlowGraphReference]
"""
super(Flow, self).__init__(**kwargs)
self.source_resource_id = source_resource_id
self.flow_graph = flow_graph
self.node_variants = node_variants
self.flow_graph_layout = flow_graph_layout
self.bulk_test_data = bulk_test_data
self.evaluation_flows = evaluation_flows
class FlowAnnotations(msrest.serialization.Model):
"""FlowAnnotations.
:ivar flow_name:
:vartype flow_name: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar archived:
:vartype archived: bool
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'flow_name': {'key': 'flowName', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'archived': {'key': 'archived', 'type': 'bool'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
flow_name: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
is_archived: Optional[bool] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
name: Optional[str] = None,
description: Optional[str] = None,
archived: Optional[bool] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword flow_name:
:paramtype flow_name: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword archived:
:paramtype archived: bool
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(FlowAnnotations, self).__init__(**kwargs)
self.flow_name = flow_name
self.created_date = created_date
self.last_modified_date = last_modified_date
self.owner = owner
self.is_archived = is_archived
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.name = name
self.description = description
self.archived = archived
self.tags = tags
class FlowBaseDto(msrest.serialization.Model):
"""FlowBaseDto.
:ivar flow_id:
:vartype flow_id: str
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar experiment_id:
:vartype experiment_id: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar flow_resource_id:
:vartype flow_resource_id: str
:ivar is_archived:
:vartype is_archived: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
flow_id: Optional[str] = None,
flow_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
experiment_id: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
flow_resource_id: Optional[str] = None,
is_archived: Optional[bool] = None,
flow_definition_file_path: Optional[str] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword flow_id:
:paramtype flow_id: str
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword experiment_id:
:paramtype experiment_id: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword flow_resource_id:
:paramtype flow_resource_id: str
:keyword is_archived:
:paramtype is_archived: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowBaseDto, self).__init__(**kwargs)
self.flow_id = flow_id
self.flow_name = flow_name
self.description = description
self.tags = tags
self.flow_type = flow_type
self.experiment_id = experiment_id
self.created_date = created_date
self.last_modified_date = last_modified_date
self.owner = owner
self.flow_resource_id = flow_resource_id
self.is_archived = is_archived
self.flow_definition_file_path = flow_definition_file_path
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.identity = identity
class FlowDto(msrest.serialization.Model):
"""FlowDto.
:ivar timestamp:
:vartype timestamp: ~datetime.datetime
:ivar e_tag: Any object.
:vartype e_tag: any
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar flow_run_result:
:vartype flow_run_result: ~flow.models.FlowRunResult
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
:ivar flow_id:
:vartype flow_id: str
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar experiment_id:
:vartype experiment_id: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
:ivar flow_resource_id:
:vartype flow_resource_id: str
:ivar is_archived:
:vartype is_archived: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
'e_tag': {'key': 'eTag', 'type': 'object'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
'flow_resource_id': {'key': 'flowResourceId', 'type': 'str'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
timestamp: Optional[datetime.datetime] = None,
e_tag: Optional[Any] = None,
flow: Optional["Flow"] = None,
flow_run_settings: Optional["FlowRunSettings"] = None,
flow_run_result: Optional["FlowRunResult"] = None,
flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None,
flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None,
studio_portal_endpoint: Optional[str] = None,
flow_id: Optional[str] = None,
flow_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
experiment_id: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
flow_resource_id: Optional[str] = None,
is_archived: Optional[bool] = None,
flow_definition_file_path: Optional[str] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword timestamp:
:paramtype timestamp: ~datetime.datetime
:keyword e_tag: Any object.
:paramtype e_tag: any
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword flow_run_result:
:paramtype flow_run_result: ~flow.models.FlowRunResult
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
:keyword flow_id:
:paramtype flow_id: str
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword experiment_id:
:paramtype experiment_id: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
:keyword flow_resource_id:
:paramtype flow_resource_id: str
:keyword is_archived:
:paramtype is_archived: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowDto, self).__init__(**kwargs)
self.timestamp = timestamp
self.e_tag = e_tag
self.flow = flow
self.flow_run_settings = flow_run_settings
self.flow_run_result = flow_run_result
self.flow_test_mode = flow_test_mode
self.flow_test_infos = flow_test_infos
self.studio_portal_endpoint = studio_portal_endpoint
self.flow_id = flow_id
self.flow_name = flow_name
self.description = description
self.tags = tags
self.flow_type = flow_type
self.experiment_id = experiment_id
self.created_date = created_date
self.last_modified_date = last_modified_date
self.owner = owner
self.flow_resource_id = flow_resource_id
self.is_archived = is_archived
self.flow_definition_file_path = flow_definition_file_path
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.identity = identity
class FlowEnvironment(msrest.serialization.Model):
"""FlowEnvironment.
:ivar image:
:vartype image: str
:ivar python_requirements_txt:
:vartype python_requirements_txt: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'python_requirements_txt': {'key': 'python_requirements_txt', 'type': 'str'},
}
def __init__(
self,
*,
image: Optional[str] = None,
python_requirements_txt: Optional[str] = None,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword python_requirements_txt:
:paramtype python_requirements_txt: str
"""
super(FlowEnvironment, self).__init__(**kwargs)
self.image = image
self.python_requirements_txt = python_requirements_txt
class FlowFeature(msrest.serialization.Model):
"""FlowFeature.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar state:
:vartype state: ~flow.models.FlowFeatureState
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'state': {'key': 'state', 'type': 'FlowFeatureState'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
state: Optional["FlowFeatureState"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword state:
:paramtype state: ~flow.models.FlowFeatureState
"""
super(FlowFeature, self).__init__(**kwargs)
self.name = name
self.description = description
self.state = state
class FlowFeatureState(msrest.serialization.Model):
"""FlowFeatureState.
:ivar runtime: Possible values include: "Ready", "E2ETest".
:vartype runtime: str or ~flow.models.FlowFeatureStateEnum
:ivar executor: Possible values include: "Ready", "E2ETest".
:vartype executor: str or ~flow.models.FlowFeatureStateEnum
:ivar pfs: Possible values include: "Ready", "E2ETest".
:vartype pfs: str or ~flow.models.FlowFeatureStateEnum
"""
_attribute_map = {
'runtime': {'key': 'Runtime', 'type': 'str'},
'executor': {'key': 'Executor', 'type': 'str'},
'pfs': {'key': 'PFS', 'type': 'str'},
}
def __init__(
self,
*,
runtime: Optional[Union[str, "FlowFeatureStateEnum"]] = None,
executor: Optional[Union[str, "FlowFeatureStateEnum"]] = None,
pfs: Optional[Union[str, "FlowFeatureStateEnum"]] = None,
**kwargs
):
"""
:keyword runtime: Possible values include: "Ready", "E2ETest".
:paramtype runtime: str or ~flow.models.FlowFeatureStateEnum
:keyword executor: Possible values include: "Ready", "E2ETest".
:paramtype executor: str or ~flow.models.FlowFeatureStateEnum
:keyword pfs: Possible values include: "Ready", "E2ETest".
:paramtype pfs: str or ~flow.models.FlowFeatureStateEnum
"""
super(FlowFeatureState, self).__init__(**kwargs)
self.runtime = runtime
self.executor = executor
self.pfs = pfs
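# Illustrative sketch, not part of the generated client: each field accepts a
# ``FlowFeatureStateEnum`` member or its plain string value. Note from
# ``_attribute_map`` above that the wire keys are 'Runtime', 'Executor' and
# 'PFS' rather than the camelCase used by most models in this module.
#
#     state = FlowFeatureState(runtime='Ready', executor='Ready', pfs='E2ETest')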
class FlowGraph(msrest.serialization.Model):
"""FlowGraph.
:ivar nodes:
:vartype nodes: list[~flow.models.Node]
:ivar tools:
:vartype tools: list[~flow.models.Tool]
:ivar codes: This is a dictionary.
:vartype codes: dict[str, str]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
"""
_attribute_map = {
'nodes': {'key': 'nodes', 'type': '[Node]'},
'tools': {'key': 'tools', 'type': '[Tool]'},
'codes': {'key': 'codes', 'type': '{str}'},
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
}
def __init__(
self,
*,
nodes: Optional[List["Node"]] = None,
tools: Optional[List["Tool"]] = None,
codes: Optional[Dict[str, str]] = None,
inputs: Optional[Dict[str, "FlowInputDefinition"]] = None,
outputs: Optional[Dict[str, "FlowOutputDefinition"]] = None,
**kwargs
):
"""
:keyword nodes:
:paramtype nodes: list[~flow.models.Node]
:keyword tools:
:paramtype tools: list[~flow.models.Tool]
:keyword codes: This is a dictionary.
:paramtype codes: dict[str, str]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
"""
super(FlowGraph, self).__init__(**kwargs)
self.nodes = nodes
self.tools = tools
self.codes = codes
self.inputs = inputs
self.outputs = outputs
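# Illustrative sketch, not part of the generated client: a ``Flow`` carrying a
# minimal ``FlowGraph``. Only the ``codes`` map (file name to source text) is
# populated; ``nodes``, ``tools``, ``inputs`` and ``outputs`` take ``Node``,
# ``Tool``, ``FlowInputDefinition`` and ``FlowOutputDefinition`` instances
# whose signatures this sketch does not assume. The file name and source are
# hypothetical.
#
#     flow = Flow(flow_graph=FlowGraph(codes={'hello.py': 'print("hi")'}))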
class FlowGraphAnnotationNode(msrest.serialization.Model):
"""FlowGraphAnnotationNode.
:ivar id:
:vartype id: str
:ivar content:
:vartype content: str
:ivar mentioned_node_names:
:vartype mentioned_node_names: list[str]
:ivar structured_content:
:vartype structured_content: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'content': {'key': 'content', 'type': 'str'},
'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
'structured_content': {'key': 'structuredContent', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
content: Optional[str] = None,
mentioned_node_names: Optional[List[str]] = None,
structured_content: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword content:
:paramtype content: str
:keyword mentioned_node_names:
:paramtype mentioned_node_names: list[str]
:keyword structured_content:
:paramtype structured_content: str
"""
super(FlowGraphAnnotationNode, self).__init__(**kwargs)
self.id = id
self.content = content
self.mentioned_node_names = mentioned_node_names
self.structured_content = structured_content
class FlowGraphLayout(msrest.serialization.Model):
"""FlowGraphLayout.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
:ivar orientation: Possible values include: "Horizontal", "Vertical".
:vartype orientation: str or ~flow.models.Orientation
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{FlowNodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[FlowGraphAnnotationNode]'},
'orientation': {'key': 'orientation', 'type': 'str'},
}
def __init__(
self,
*,
node_layouts: Optional[Dict[str, "FlowNodeLayout"]] = None,
extended_data: Optional[str] = None,
annotation_nodes: Optional[List["FlowGraphAnnotationNode"]] = None,
orientation: Optional[Union[str, "Orientation"]] = None,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.FlowNodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.FlowGraphAnnotationNode]
:keyword orientation: Possible values include: "Horizontal", "Vertical".
:paramtype orientation: str or ~flow.models.Orientation
"""
super(FlowGraphLayout, self).__init__(**kwargs)
self.node_layouts = node_layouts
self.extended_data = extended_data
self.annotation_nodes = annotation_nodes
self.orientation = orientation
class FlowGraphReference(msrest.serialization.Model):
"""FlowGraphReference.
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar reference_resource_id:
:vartype reference_resource_id: str
"""
_attribute_map = {
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'reference_resource_id': {'key': 'referenceResourceId', 'type': 'str'},
}
def __init__(
self,
*,
flow_graph: Optional["FlowGraph"] = None,
reference_resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword reference_resource_id:
:paramtype reference_resource_id: str
"""
super(FlowGraphReference, self).__init__(**kwargs)
self.flow_graph = flow_graph
self.reference_resource_id = reference_resource_id
class FlowIndexEntity(msrest.serialization.Model):
"""FlowIndexEntity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar schema_id:
:vartype schema_id: str
:ivar entity_id:
:vartype entity_id: str
:ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:vartype kind: str or ~flow.models.EntityKind
:ivar annotations:
:vartype annotations: ~flow.models.FlowAnnotations
:ivar properties:
:vartype properties: ~flow.models.FlowProperties
:ivar internal: Any object.
:vartype internal: any
:ivar update_sequence:
:vartype update_sequence: long
:ivar type:
:vartype type: str
:ivar version:
:vartype version: str
:ivar entity_container_id:
:vartype entity_container_id: str
:ivar entity_object_id:
:vartype entity_object_id: str
:ivar resource_type:
:vartype resource_type: str
:ivar relationships:
:vartype relationships: list[~flow.models.Relationship]
:ivar asset_id:
:vartype asset_id: str
"""
_validation = {
'version': {'readonly': True},
'entity_container_id': {'readonly': True},
'entity_object_id': {'readonly': True},
'resource_type': {'readonly': True},
}
_attribute_map = {
'schema_id': {'key': 'schemaId', 'type': 'str'},
'entity_id': {'key': 'entityId', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'annotations': {'key': 'annotations', 'type': 'FlowAnnotations'},
'properties': {'key': 'properties', 'type': 'FlowProperties'},
'internal': {'key': 'internal', 'type': 'object'},
'update_sequence': {'key': 'updateSequence', 'type': 'long'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'relationships': {'key': 'relationships', 'type': '[Relationship]'},
'asset_id': {'key': 'assetId', 'type': 'str'},
}
def __init__(
self,
*,
schema_id: Optional[str] = None,
entity_id: Optional[str] = None,
kind: Optional[Union[str, "EntityKind"]] = None,
annotations: Optional["FlowAnnotations"] = None,
properties: Optional["FlowProperties"] = None,
internal: Optional[Any] = None,
update_sequence: Optional[int] = None,
type: Optional[str] = None,
relationships: Optional[List["Relationship"]] = None,
asset_id: Optional[str] = None,
**kwargs
):
"""
:keyword schema_id:
:paramtype schema_id: str
:keyword entity_id:
:paramtype entity_id: str
:keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:paramtype kind: str or ~flow.models.EntityKind
:keyword annotations:
:paramtype annotations: ~flow.models.FlowAnnotations
:keyword properties:
:paramtype properties: ~flow.models.FlowProperties
:keyword internal: Any object.
:paramtype internal: any
:keyword update_sequence:
:paramtype update_sequence: long
:keyword type:
:paramtype type: str
:keyword relationships:
:paramtype relationships: list[~flow.models.Relationship]
:keyword asset_id:
:paramtype asset_id: str
"""
super(FlowIndexEntity, self).__init__(**kwargs)
self.schema_id = schema_id
self.entity_id = entity_id
self.kind = kind
self.annotations = annotations
self.properties = properties
self.internal = internal
self.update_sequence = update_sequence
self.type = type
self.version = None
self.entity_container_id = None
self.entity_object_id = None
self.resource_type = None
self.relationships = relationships
self.asset_id = asset_id
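# Illustrative, hand-written sketch (not AutoRest output). The fields listed
# in ``_validation`` as readonly (version, entity_container_id,
# entity_object_id, resource_type) are populated by the service only: the
# constructor always initializes them to None, and ``msrest``'s
# ``serialize()`` omits readonly attributes from request payloads by default.
def _example_flow_index_entity_payload() -> dict:
    """Serialize a minimal FlowIndexEntity; readonly fields are dropped."""
    entity = FlowIndexEntity(entity_id="my-entity", kind="Versioned")
    return entity.serialize()  # keep_readonly defaults to False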
class FlowInputDefinition(msrest.serialization.Model):
"""FlowInputDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:vartype type: str or ~flow.models.ValueType
:ivar default: Anything.
:vartype default: any
:ivar description:
:vartype description: str
:ivar is_chat_input:
:vartype is_chat_input: bool
:ivar is_chat_history:
:vartype is_chat_history: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default': {'key': 'default', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'is_chat_input': {'key': 'is_chat_input', 'type': 'bool'},
'is_chat_history': {'key': 'is_chat_history', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[Union[str, "ValueType"]] = None,
default: Optional[Any] = None,
description: Optional[str] = None,
is_chat_input: Optional[bool] = None,
is_chat_history: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:paramtype type: str or ~flow.models.ValueType
:keyword default: Anything.
:paramtype default: any
:keyword description:
:paramtype description: str
:keyword is_chat_input:
:paramtype is_chat_input: bool
:keyword is_chat_history:
:paramtype is_chat_history: bool
"""
super(FlowInputDefinition, self).__init__(**kwargs)
self.name = name
self.type = type
self.default = default
self.description = description
self.is_chat_input = is_chat_input
self.is_chat_history = is_chat_history
class FlowNode(msrest.serialization.Model):
"""FlowNode.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:vartype type: str or ~flow.models.ToolType
:ivar source:
:vartype source: ~flow.models.NodeSource
:ivar inputs: Dictionary of :code:`<any>`.
:vartype inputs: dict[str, any]
:ivar use_variants:
:vartype use_variants: bool
:ivar activate:
:vartype activate: ~flow.models.Activate
:ivar comment:
:vartype comment: str
:ivar api:
:vartype api: str
:ivar provider:
:vartype provider: str
:ivar connection:
:vartype connection: str
:ivar module:
:vartype module: str
:ivar aggregation:
:vartype aggregation: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'NodeSource'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'use_variants': {'key': 'use_variants', 'type': 'bool'},
'activate': {'key': 'activate', 'type': 'Activate'},
'comment': {'key': 'comment', 'type': 'str'},
'api': {'key': 'api', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'connection': {'key': 'connection', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
'aggregation': {'key': 'aggregation', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[Union[str, "ToolType"]] = None,
source: Optional["NodeSource"] = None,
inputs: Optional[Dict[str, Any]] = None,
use_variants: Optional[bool] = None,
activate: Optional["Activate"] = None,
comment: Optional[str] = None,
api: Optional[str] = None,
provider: Optional[str] = None,
connection: Optional[str] = None,
module: Optional[str] = None,
aggregation: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:paramtype type: str or ~flow.models.ToolType
:keyword source:
:paramtype source: ~flow.models.NodeSource
:keyword inputs: Dictionary of :code:`<any>`.
:paramtype inputs: dict[str, any]
:keyword use_variants:
:paramtype use_variants: bool
:keyword activate:
:paramtype activate: ~flow.models.Activate
:keyword comment:
:paramtype comment: str
:keyword api:
:paramtype api: str
:keyword provider:
:paramtype provider: str
:keyword connection:
:paramtype connection: str
:keyword module:
:paramtype module: str
:keyword aggregation:
:paramtype aggregation: bool
"""
super(FlowNode, self).__init__(**kwargs)
self.name = name
self.type = type
self.source = source
self.inputs = inputs
self.use_variants = use_variants
self.activate = activate
self.comment = comment
self.api = api
self.provider = provider
self.connection = connection
self.module = module
self.aggregation = aggregation
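# Illustrative, hand-written sketch (not AutoRest output): a single llm node.
# The provider, connection, and api values are hypothetical placeholders,
# not names validated by the service; ``inputs`` maps parameter names to
# arbitrary values, so the "${inputs.question}" binding syntax is likewise
# assumed.
def _example_llm_node() -> "FlowNode":
    """Build a toy llm node with a couple of bound inputs."""
    return FlowNode(
        name="chat",
        type="llm",                       # one of the ToolType values above
        provider="AzureOpenAI",           # hypothetical provider name
        connection="my-aoai-connection",  # hypothetical connection name
        api="chat",
        inputs={"temperature": 0.2, "question": "${inputs.question}"},
    )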
class FlowNodeLayout(msrest.serialization.Model):
"""FlowNodeLayout.
:ivar x:
:vartype x: float
:ivar y:
:vartype y: float
:ivar width:
:vartype width: float
:ivar height:
:vartype height: float
:ivar index:
:vartype index: int
:ivar extended_data:
:vartype extended_data: str
"""
_attribute_map = {
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
'width': {'key': 'width', 'type': 'float'},
'height': {'key': 'height', 'type': 'float'},
'index': {'key': 'index', 'type': 'int'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
}
def __init__(
self,
*,
x: Optional[float] = None,
y: Optional[float] = None,
width: Optional[float] = None,
height: Optional[float] = None,
index: Optional[int] = None,
extended_data: Optional[str] = None,
**kwargs
):
"""
:keyword x:
:paramtype x: float
:keyword y:
:paramtype y: float
:keyword width:
:paramtype width: float
:keyword height:
:paramtype height: float
:keyword index:
:paramtype index: int
:keyword extended_data:
:paramtype extended_data: str
"""
super(FlowNodeLayout, self).__init__(**kwargs)
self.x = x
self.y = y
self.width = width
self.height = height
self.index = index
self.extended_data = extended_data
class FlowNodeVariant(msrest.serialization.Model):
"""FlowNodeVariant.
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, ~flow.models.FlowVariantNode]
"""
_attribute_map = {
'default_variant_id': {'key': 'default_variant_id', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{FlowVariantNode}'},
}
def __init__(
self,
*,
default_variant_id: Optional[str] = None,
variants: Optional[Dict[str, "FlowVariantNode"]] = None,
**kwargs
):
"""
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, ~flow.models.FlowVariantNode]
"""
super(FlowNodeVariant, self).__init__(**kwargs)
self.default_variant_id = default_variant_id
self.variants = variants
class FlowOutputDefinition(msrest.serialization.Model):
"""FlowOutputDefinition.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:vartype type: str or ~flow.models.ValueType
:ivar description:
:vartype description: str
:ivar reference:
:vartype reference: str
:ivar evaluation_only:
:vartype evaluation_only: bool
:ivar is_chat_output:
:vartype is_chat_output: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'reference': {'key': 'reference', 'type': 'str'},
'evaluation_only': {'key': 'evaluation_only', 'type': 'bool'},
'is_chat_output': {'key': 'is_chat_output', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[Union[str, "ValueType"]] = None,
description: Optional[str] = None,
reference: Optional[str] = None,
evaluation_only: Optional[bool] = None,
is_chat_output: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:paramtype type: str or ~flow.models.ValueType
:keyword description:
:paramtype description: str
:keyword reference:
:paramtype reference: str
:keyword evaluation_only:
:paramtype evaluation_only: bool
:keyword is_chat_output:
:paramtype is_chat_output: bool
"""
super(FlowOutputDefinition, self).__init__(**kwargs)
self.name = name
self.type = type
self.description = description
self.reference = reference
self.evaluation_only = evaluation_only
self.is_chat_output = is_chat_output
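# Illustrative, hand-written sketch (not AutoRest output): the input/output
# pair a chat-style flow would declare. ``is_chat_input``/``is_chat_history``
# and ``is_chat_output`` mark which fields a chat client binds to; the
# reference value is a hypothetical placeholder.
def _example_chat_io() -> tuple:
    """Return (inputs, outputs) dicts for a minimal chat flow."""
    inputs = {
        "question": FlowInputDefinition(
            name="question", type="string", is_chat_input=True
        ),
        "chat_history": FlowInputDefinition(
            name="chat_history", type="list", is_chat_history=True
        ),
    }
    outputs = {
        "answer": FlowOutputDefinition(
            name="answer", type="string",
            reference="${chat.output}", is_chat_output=True
        ),
    }
    return inputs, outputs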
class FlowProperties(msrest.serialization.Model):
"""FlowProperties.
:ivar flow_id:
:vartype flow_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar creation_context:
:vartype creation_context: ~flow.models.CreationContext
"""
_attribute_map = {
'flow_id': {'key': 'flowId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
}
def __init__(
self,
*,
flow_id: Optional[str] = None,
experiment_id: Optional[str] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
flow_definition_file_path: Optional[str] = None,
creation_context: Optional["CreationContext"] = None,
**kwargs
):
"""
:keyword flow_id:
:paramtype flow_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword creation_context:
:paramtype creation_context: ~flow.models.CreationContext
"""
super(FlowProperties, self).__init__(**kwargs)
self.flow_id = flow_id
self.experiment_id = experiment_id
self.flow_type = flow_type
self.flow_definition_file_path = flow_definition_file_path
self.creation_context = creation_context
class FlowRunBasePath(msrest.serialization.Model):
"""FlowRunBasePath.
:ivar output_datastore_name:
:vartype output_datastore_name: str
:ivar base_path:
:vartype base_path: str
"""
_attribute_map = {
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
'base_path': {'key': 'basePath', 'type': 'str'},
}
def __init__(
self,
*,
output_datastore_name: Optional[str] = None,
base_path: Optional[str] = None,
**kwargs
):
"""
:keyword output_datastore_name:
:paramtype output_datastore_name: str
:keyword base_path:
:paramtype base_path: str
"""
super(FlowRunBasePath, self).__init__(**kwargs)
self.output_datastore_name = output_datastore_name
self.base_path = base_path
class FlowRunInfo(msrest.serialization.Model):
"""FlowRunInfo.
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar output_datastore_name:
:vartype output_datastore_name: str
:ivar child_run_base_path:
:vartype child_run_base_path: str
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
:ivar studio_portal_endpoint:
:vartype studio_portal_endpoint: str
"""
_attribute_map = {
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
'child_run_base_path': {'key': 'childRunBasePath', 'type': 'str'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'studio_portal_endpoint': {'key': 'studioPortalEndpoint', 'type': 'str'},
}
def __init__(
self,
*,
flow_graph: Optional["FlowGraph"] = None,
flow_graph_layout: Optional["FlowGraphLayout"] = None,
flow_name: Optional[str] = None,
flow_run_resource_id: Optional[str] = None,
flow_run_id: Optional[str] = None,
flow_run_display_name: Optional[str] = None,
batch_inputs: Optional[List[Dict[str, Any]]] = None,
batch_data_input: Optional["BatchDataInput"] = None,
flow_run_type: Optional[Union[str, "FlowRunTypeEnum"]] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
runtime_name: Optional[str] = None,
bulk_test_id: Optional[str] = None,
created_by: Optional["SchemaContractsCreatedBy"] = None,
created_on: Optional[datetime.datetime] = None,
inputs_mapping: Optional[Dict[str, str]] = None,
output_datastore_name: Optional[str] = None,
child_run_base_path: Optional[str] = None,
working_directory: Optional[str] = None,
flow_dag_file_relative_path: Optional[str] = None,
flow_snapshot_id: Optional[str] = None,
studio_portal_endpoint: Optional[str] = None,
**kwargs
):
"""
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword output_datastore_name:
:paramtype output_datastore_name: str
:keyword child_run_base_path:
:paramtype child_run_base_path: str
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
:keyword studio_portal_endpoint:
:paramtype studio_portal_endpoint: str
"""
super(FlowRunInfo, self).__init__(**kwargs)
self.flow_graph = flow_graph
self.flow_graph_layout = flow_graph_layout
self.flow_name = flow_name
self.flow_run_resource_id = flow_run_resource_id
self.flow_run_id = flow_run_id
self.flow_run_display_name = flow_run_display_name
self.batch_inputs = batch_inputs
self.batch_data_input = batch_data_input
self.flow_run_type = flow_run_type
self.flow_type = flow_type
self.runtime_name = runtime_name
self.bulk_test_id = bulk_test_id
self.created_by = created_by
self.created_on = created_on
self.inputs_mapping = inputs_mapping
self.output_datastore_name = output_datastore_name
self.child_run_base_path = child_run_base_path
self.working_directory = working_directory
self.flow_dag_file_relative_path = flow_dag_file_relative_path
self.flow_snapshot_id = flow_snapshot_id
self.studio_portal_endpoint = studio_portal_endpoint
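# Illustrative, hand-written sketch (not AutoRest output): service payloads
# use the camelCase keys declared in ``_attribute_map``, and ``msrest``'s
# ``deserialize`` classmethod maps them back onto the snake_case attributes,
# parsing iso-8601 strings into datetimes. The payload values are
# hypothetical.
def _example_flow_run_info() -> "FlowRunInfo":
    """Deserialize a minimal service payload into a FlowRunInfo."""
    payload = {
        "flowRunId": "run-001",
        "flowRunType": "FlowRun",
        "createdOn": "2023-01-01T00:00:00Z",  # iso-8601 per _attribute_map
    }
    return FlowRunInfo.deserialize(payload)  # .flow_run_id == "run-001"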
class FlowRunResult(msrest.serialization.Model):
"""FlowRunResult.
:ivar flow_runs:
:vartype flow_runs: list[any]
:ivar node_runs:
:vartype node_runs: list[any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar flow_run_logs: Dictionary of :code:`<string>`.
:vartype flow_run_logs: dict[str, str]
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
    :ivar variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each variant
     run id to the ids of its evaluation runs.
:vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
_attribute_map = {
'flow_runs': {'key': 'flow_runs', 'type': '[object]'},
'node_runs': {'key': 'node_runs', 'type': '[object]'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'},
}
def __init__(
self,
*,
flow_runs: Optional[List[Any]] = None,
node_runs: Optional[List[Any]] = None,
error_response: Optional["ErrorResponse"] = None,
flow_name: Optional[str] = None,
flow_run_display_name: Optional[str] = None,
flow_run_id: Optional[str] = None,
flow_graph: Optional["FlowGraph"] = None,
flow_graph_layout: Optional["FlowGraphLayout"] = None,
flow_run_resource_id: Optional[str] = None,
bulk_test_id: Optional[str] = None,
batch_inputs: Optional[List[Dict[str, Any]]] = None,
batch_data_input: Optional["BatchDataInput"] = None,
created_by: Optional["SchemaContractsCreatedBy"] = None,
created_on: Optional[datetime.datetime] = None,
flow_run_type: Optional[Union[str, "FlowRunTypeEnum"]] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
runtime_name: Optional[str] = None,
aml_compute_name: Optional[str] = None,
flow_run_logs: Optional[Dict[str, str]] = None,
flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None,
flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None,
working_directory: Optional[str] = None,
flow_dag_file_relative_path: Optional[str] = None,
flow_snapshot_id: Optional[str] = None,
variant_run_to_evaluation_runs_id_mapping: Optional[Dict[str, List[str]]] = None,
**kwargs
):
"""
:keyword flow_runs:
:paramtype flow_runs: list[any]
:keyword node_runs:
:paramtype node_runs: list[any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword flow_run_logs: Dictionary of :code:`<string>`.
:paramtype flow_run_logs: dict[str, str]
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
        :keyword variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each
         variant run id to the ids of its evaluation runs.
:paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
super(FlowRunResult, self).__init__(**kwargs)
self.flow_runs = flow_runs
self.node_runs = node_runs
self.error_response = error_response
self.flow_name = flow_name
self.flow_run_display_name = flow_run_display_name
self.flow_run_id = flow_run_id
self.flow_graph = flow_graph
self.flow_graph_layout = flow_graph_layout
self.flow_run_resource_id = flow_run_resource_id
self.bulk_test_id = bulk_test_id
self.batch_inputs = batch_inputs
self.batch_data_input = batch_data_input
self.created_by = created_by
self.created_on = created_on
self.flow_run_type = flow_run_type
self.flow_type = flow_type
self.runtime_name = runtime_name
self.aml_compute_name = aml_compute_name
self.flow_run_logs = flow_run_logs
self.flow_test_mode = flow_test_mode
self.flow_test_infos = flow_test_infos
self.working_directory = working_directory
self.flow_dag_file_relative_path = flow_dag_file_relative_path
self.flow_snapshot_id = flow_snapshot_id
self.variant_run_to_evaluation_runs_id_mapping = variant_run_to_evaluation_runs_id_mapping
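# Illustrative, hand-written sketch (not AutoRest output): the shape of
# ``variant_run_to_evaluation_runs_id_mapping`` is plain ``dict[str, list[str]]``,
# one variant run id to the ids of the evaluation runs scored against it.
# The run ids below are hypothetical.
_example_variant_mapping = {
    "variant_0_run": ["eval_run_a", "eval_run_b"],
    "variant_1_run": ["eval_run_c"],
}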
class FlowRunSettings(msrest.serialization.Model):
"""FlowRunSettings.
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
"PairwiseEval".
:vartype run_mode: str or ~flow.models.FlowRunMode
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar tuning_node_names:
:vartype tuning_node_names: list[str]
:ivar tuning_node_settings: This is a dictionary.
:vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:ivar baseline_variant_id:
:vartype baseline_variant_id: str
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, list[~flow.models.Node]]
:ivar variants_tools:
:vartype variants_tools: list[~flow.models.Tool]
:ivar variants_codes: This is a dictionary.
:vartype variants_codes: dict[str, str]
:ivar node_name:
:vartype node_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar evaluation_flow_run_settings: This is a dictionary.
:vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar bulk_test_flow_id:
:vartype bulk_test_flow_id: str
:ivar bulk_test_flow_run_ids:
:vartype bulk_test_flow_run_ids: list[str]
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar runtime_name:
:vartype runtime_name: str
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
"""
_attribute_map = {
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'run_mode': {'key': 'runMode', 'type': 'str'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{[Node]}'},
'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'},
'variants_codes': {'key': 'variantsCodes', 'type': '{str}'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
}
def __init__(
self,
*,
flow_run_display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
run_mode: Optional[Union[str, "FlowRunMode"]] = None,
batch_inputs: Optional[List[Dict[str, Any]]] = None,
batch_data_input: Optional["BatchDataInput"] = None,
tuning_node_names: Optional[List[str]] = None,
tuning_node_settings: Optional[Dict[str, "TuningNodeSetting"]] = None,
baseline_variant_id: Optional[str] = None,
default_variant_id: Optional[str] = None,
variants: Optional[Dict[str, List["Node"]]] = None,
variants_tools: Optional[List["Tool"]] = None,
variants_codes: Optional[Dict[str, str]] = None,
node_name: Optional[str] = None,
bulk_test_id: Optional[str] = None,
evaluation_flow_run_settings: Optional[Dict[str, "EvaluationFlowRunSettings"]] = None,
inputs_mapping: Optional[Dict[str, str]] = None,
data_inputs: Optional[Dict[str, str]] = None,
bulk_test_flow_id: Optional[str] = None,
bulk_test_flow_run_ids: Optional[List[str]] = None,
aml_compute_name: Optional[str] = None,
runtime_name: Optional[str] = None,
flow_run_output_directory: Optional[str] = None,
**kwargs
):
"""
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
"Eval", "PairwiseEval".
:paramtype run_mode: str or ~flow.models.FlowRunMode
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword tuning_node_names:
:paramtype tuning_node_names: list[str]
:keyword tuning_node_settings: This is a dictionary.
:paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:keyword baseline_variant_id:
:paramtype baseline_variant_id: str
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, list[~flow.models.Node]]
:keyword variants_tools:
:paramtype variants_tools: list[~flow.models.Tool]
:keyword variants_codes: This is a dictionary.
:paramtype variants_codes: dict[str, str]
:keyword node_name:
:paramtype node_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword evaluation_flow_run_settings: This is a dictionary.
:paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword bulk_test_flow_id:
:paramtype bulk_test_flow_id: str
:keyword bulk_test_flow_run_ids:
:paramtype bulk_test_flow_run_ids: list[str]
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword runtime_name:
:paramtype runtime_name: str
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
"""
super(FlowRunSettings, self).__init__(**kwargs)
self.flow_run_display_name = flow_run_display_name
self.description = description
self.tags = tags
self.properties = properties
self.run_mode = run_mode
self.batch_inputs = batch_inputs
self.batch_data_input = batch_data_input
self.tuning_node_names = tuning_node_names
self.tuning_node_settings = tuning_node_settings
self.baseline_variant_id = baseline_variant_id
self.default_variant_id = default_variant_id
self.variants = variants
self.variants_tools = variants_tools
self.variants_codes = variants_codes
self.node_name = node_name
self.bulk_test_id = bulk_test_id
self.evaluation_flow_run_settings = evaluation_flow_run_settings
self.inputs_mapping = inputs_mapping
self.data_inputs = data_inputs
self.bulk_test_flow_id = bulk_test_flow_id
self.bulk_test_flow_run_ids = bulk_test_flow_run_ids
self.aml_compute_name = aml_compute_name
self.runtime_name = runtime_name
self.flow_run_output_directory = flow_run_output_directory
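# Illustrative, hand-written sketch (not AutoRest output): minimal settings
# for a bulk-test style run. The "${data.question}" mapping syntax and the
# runtime name are hypothetical placeholders.
def _example_bulk_test_settings() -> "FlowRunSettings":
    """Build toy run settings mapping a dataset column to a flow input."""
    return FlowRunSettings(
        flow_run_display_name="my bulk test",
        run_mode="BulkTest",        # one of the FlowRunMode values above
        inputs_mapping={"question": "${data.question}"},
        runtime_name="my-runtime",  # hypothetical runtime name
    )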
class FlowRuntimeCapability(msrest.serialization.Model):
"""FlowRuntimeCapability.
:ivar flow_features:
:vartype flow_features: list[~flow.models.FlowFeature]
"""
_attribute_map = {
'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
}
def __init__(
self,
*,
flow_features: Optional[List["FlowFeature"]] = None,
**kwargs
):
"""
:keyword flow_features:
:paramtype flow_features: list[~flow.models.FlowFeature]
"""
super(FlowRuntimeCapability, self).__init__(**kwargs)
self.flow_features = flow_features
class FlowRuntimeDto(msrest.serialization.Model):
"""FlowRuntimeDto.
:ivar runtime_name:
:vartype runtime_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar environment:
:vartype environment: str
:ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:vartype status: str or ~flow.models.RuntimeStatusEnum
:ivar status_message:
:vartype status_message: str
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar docker_image:
:vartype docker_image: str
:ivar published_port:
:vartype published_port: int
:ivar target_port:
:vartype target_port: int
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar assigned_to:
:vartype assigned_to: ~flow.models.AssignedUser
:ivar endpoint_url:
:vartype endpoint_url: str
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
"""
_attribute_map = {
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'docker_image': {'key': 'dockerImage', 'type': 'str'},
'published_port': {'key': 'publishedPort', 'type': 'int'},
'target_port': {'key': 'targetPort', 'type': 'int'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
}
def __init__(
self,
*,
runtime_name: Optional[str] = None,
runtime_description: Optional[str] = None,
runtime_type: Optional[Union[str, "RuntimeType"]] = None,
environment: Optional[str] = None,
status: Optional[Union[str, "RuntimeStatusEnum"]] = None,
status_message: Optional[str] = None,
error: Optional["ErrorResponse"] = None,
from_existing_endpoint: Optional[bool] = None,
endpoint_name: Optional[str] = None,
from_existing_deployment: Optional[bool] = None,
deployment_name: Optional[str] = None,
identity: Optional["ManagedServiceIdentity"] = None,
instance_type: Optional[str] = None,
instance_count: Optional[int] = None,
compute_instance_name: Optional[str] = None,
docker_image: Optional[str] = None,
published_port: Optional[int] = None,
target_port: Optional[int] = None,
from_existing_custom_app: Optional[bool] = None,
custom_app_name: Optional[str] = None,
assigned_to: Optional["AssignedUser"] = None,
endpoint_url: Optional[str] = None,
created_on: Optional[datetime.datetime] = None,
modified_on: Optional[datetime.datetime] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
**kwargs
):
"""
:keyword runtime_name:
:paramtype runtime_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword environment:
:paramtype environment: str
:keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:paramtype status: str or ~flow.models.RuntimeStatusEnum
:keyword status_message:
:paramtype status_message: str
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword docker_image:
:paramtype docker_image: str
:keyword published_port:
:paramtype published_port: int
:keyword target_port:
:paramtype target_port: int
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword assigned_to:
:paramtype assigned_to: ~flow.models.AssignedUser
:keyword endpoint_url:
:paramtype endpoint_url: str
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
"""
super(FlowRuntimeDto, self).__init__(**kwargs)
self.runtime_name = runtime_name
self.runtime_description = runtime_description
self.runtime_type = runtime_type
self.environment = environment
self.status = status
self.status_message = status_message
self.error = error
self.from_existing_endpoint = from_existing_endpoint
self.endpoint_name = endpoint_name
self.from_existing_deployment = from_existing_deployment
self.deployment_name = deployment_name
self.identity = identity
self.instance_type = instance_type
self.instance_count = instance_count
self.compute_instance_name = compute_instance_name
self.docker_image = docker_image
self.published_port = published_port
self.target_port = target_port
self.from_existing_custom_app = from_existing_custom_app
self.custom_app_name = custom_app_name
self.assigned_to = assigned_to
self.endpoint_url = endpoint_url
self.created_on = created_on
self.modified_on = modified_on
self.owner = owner
class FlowSampleDto(msrest.serialization.Model):
"""FlowSampleDto.
:ivar sample_resource_id:
:vartype sample_resource_id: str
:ivar section: Possible values include: "Gallery", "Template".
:vartype section: str or ~flow.models.Section
:ivar index_number:
:vartype index_number: int
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'sample_resource_id': {'key': 'sampleResourceId', 'type': 'str'},
'section': {'key': 'section', 'type': 'str'},
'index_number': {'key': 'indexNumber', 'type': 'int'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
sample_resource_id: Optional[str] = None,
section: Optional[Union[str, "Section"]] = None,
index_number: Optional[int] = None,
flow_name: Optional[str] = None,
description: Optional[str] = None,
details: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
flow: Optional["Flow"] = None,
flow_definition_file_path: Optional[str] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
flow_run_settings: Optional["FlowRunSettings"] = None,
is_archived: Optional[bool] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword sample_resource_id:
:paramtype sample_resource_id: str
:keyword section: Possible values include: "Gallery", "Template".
:paramtype section: str or ~flow.models.Section
:keyword index_number:
:paramtype index_number: int
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(FlowSampleDto, self).__init__(**kwargs)
self.sample_resource_id = sample_resource_id
self.section = section
self.index_number = index_number
self.flow_name = flow_name
self.description = description
self.details = details
self.tags = tags
self.flow = flow
self.flow_definition_file_path = flow_definition_file_path
self.flow_type = flow_type
self.flow_run_settings = flow_run_settings
self.is_archived = is_archived
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.identity = identity
class FlowSessionDto(msrest.serialization.Model):
"""FlowSessionDto.
:ivar session_id:
:vartype session_id: str
:ivar base_image:
:vartype base_image: str
:ivar packages:
:vartype packages: list[str]
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar flow_features:
:vartype flow_features: list[~flow.models.FlowFeature]
:ivar runtime_name:
:vartype runtime_name: str
:ivar runtime_description:
:vartype runtime_description: str
:ivar runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:vartype runtime_type: str or ~flow.models.RuntimeType
:ivar environment:
:vartype environment: str
:ivar status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:vartype status: str or ~flow.models.RuntimeStatusEnum
:ivar status_message:
:vartype status_message: str
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar from_existing_endpoint:
:vartype from_existing_endpoint: bool
:ivar endpoint_name:
:vartype endpoint_name: str
:ivar from_existing_deployment:
:vartype from_existing_deployment: bool
:ivar deployment_name:
:vartype deployment_name: str
:ivar identity:
:vartype identity: ~flow.models.ManagedServiceIdentity
:ivar instance_type:
:vartype instance_type: str
:ivar instance_count:
:vartype instance_count: int
:ivar compute_instance_name:
:vartype compute_instance_name: str
:ivar docker_image:
:vartype docker_image: str
:ivar published_port:
:vartype published_port: int
:ivar target_port:
:vartype target_port: int
:ivar from_existing_custom_app:
:vartype from_existing_custom_app: bool
:ivar custom_app_name:
:vartype custom_app_name: str
:ivar assigned_to:
:vartype assigned_to: ~flow.models.AssignedUser
:ivar endpoint_url:
:vartype endpoint_url: str
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar modified_on:
:vartype modified_on: ~datetime.datetime
:ivar owner:
:vartype owner: ~flow.models.SchemaContractsCreatedBy
"""
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'base_image': {'key': 'baseImage', 'type': 'str'},
'packages': {'key': 'packages', 'type': '[str]'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'flow_features': {'key': 'flowFeatures', 'type': '[FlowFeature]'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'runtime_type': {'key': 'runtimeType', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_message': {'key': 'statusMessage', 'type': 'str'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'from_existing_endpoint': {'key': 'fromExistingEndpoint', 'type': 'bool'},
'endpoint_name': {'key': 'endpointName', 'type': 'str'},
'from_existing_deployment': {'key': 'fromExistingDeployment', 'type': 'bool'},
'deployment_name': {'key': 'deploymentName', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'instance_type': {'key': 'instanceType', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
'compute_instance_name': {'key': 'computeInstanceName', 'type': 'str'},
'docker_image': {'key': 'dockerImage', 'type': 'str'},
'published_port': {'key': 'publishedPort', 'type': 'int'},
'target_port': {'key': 'targetPort', 'type': 'int'},
'from_existing_custom_app': {'key': 'fromExistingCustomApp', 'type': 'bool'},
'custom_app_name': {'key': 'customAppName', 'type': 'str'},
'assigned_to': {'key': 'assignedTo', 'type': 'AssignedUser'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'SchemaContractsCreatedBy'},
}
def __init__(
self,
*,
session_id: Optional[str] = None,
base_image: Optional[str] = None,
packages: Optional[List[str]] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
flow_features: Optional[List["FlowFeature"]] = None,
runtime_name: Optional[str] = None,
runtime_description: Optional[str] = None,
runtime_type: Optional[Union[str, "RuntimeType"]] = None,
environment: Optional[str] = None,
status: Optional[Union[str, "RuntimeStatusEnum"]] = None,
status_message: Optional[str] = None,
error: Optional["ErrorResponse"] = None,
from_existing_endpoint: Optional[bool] = None,
endpoint_name: Optional[str] = None,
from_existing_deployment: Optional[bool] = None,
deployment_name: Optional[str] = None,
identity: Optional["ManagedServiceIdentity"] = None,
instance_type: Optional[str] = None,
instance_count: Optional[int] = None,
compute_instance_name: Optional[str] = None,
docker_image: Optional[str] = None,
published_port: Optional[int] = None,
target_port: Optional[int] = None,
from_existing_custom_app: Optional[bool] = None,
custom_app_name: Optional[str] = None,
assigned_to: Optional["AssignedUser"] = None,
endpoint_url: Optional[str] = None,
created_on: Optional[datetime.datetime] = None,
modified_on: Optional[datetime.datetime] = None,
owner: Optional["SchemaContractsCreatedBy"] = None,
**kwargs
):
"""
:keyword session_id:
:paramtype session_id: str
:keyword base_image:
:paramtype base_image: str
:keyword packages:
:paramtype packages: list[str]
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword flow_features:
:paramtype flow_features: list[~flow.models.FlowFeature]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword runtime_description:
:paramtype runtime_description: str
:keyword runtime_type: Possible values include: "ManagedOnlineEndpoint", "ComputeInstance",
"TrainingSession".
:paramtype runtime_type: str or ~flow.models.RuntimeType
:keyword environment:
:paramtype environment: str
:keyword status: Possible values include: "Unavailable", "Failed", "NotExist", "Starting",
"Stopping".
:paramtype status: str or ~flow.models.RuntimeStatusEnum
:keyword status_message:
:paramtype status_message: str
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword from_existing_endpoint:
:paramtype from_existing_endpoint: bool
:keyword endpoint_name:
:paramtype endpoint_name: str
:keyword from_existing_deployment:
:paramtype from_existing_deployment: bool
:keyword deployment_name:
:paramtype deployment_name: str
:keyword identity:
:paramtype identity: ~flow.models.ManagedServiceIdentity
:keyword instance_type:
:paramtype instance_type: str
:keyword instance_count:
:paramtype instance_count: int
:keyword compute_instance_name:
:paramtype compute_instance_name: str
:keyword docker_image:
:paramtype docker_image: str
:keyword published_port:
:paramtype published_port: int
:keyword target_port:
:paramtype target_port: int
:keyword from_existing_custom_app:
:paramtype from_existing_custom_app: bool
:keyword custom_app_name:
:paramtype custom_app_name: str
:keyword assigned_to:
:paramtype assigned_to: ~flow.models.AssignedUser
:keyword endpoint_url:
:paramtype endpoint_url: str
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword modified_on:
:paramtype modified_on: ~datetime.datetime
:keyword owner:
:paramtype owner: ~flow.models.SchemaContractsCreatedBy
"""
super(FlowSessionDto, self).__init__(**kwargs)
self.session_id = session_id
self.base_image = base_image
self.packages = packages
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.flow_features = flow_features
self.runtime_name = runtime_name
self.runtime_description = runtime_description
self.runtime_type = runtime_type
self.environment = environment
self.status = status
self.status_message = status_message
self.error = error
self.from_existing_endpoint = from_existing_endpoint
self.endpoint_name = endpoint_name
self.from_existing_deployment = from_existing_deployment
self.deployment_name = deployment_name
self.identity = identity
self.instance_type = instance_type
self.instance_count = instance_count
self.compute_instance_name = compute_instance_name
self.docker_image = docker_image
self.published_port = published_port
self.target_port = target_port
self.from_existing_custom_app = from_existing_custom_app
self.custom_app_name = custom_app_name
self.assigned_to = assigned_to
self.endpoint_url = endpoint_url
self.created_on = created_on
self.modified_on = modified_on
self.owner = owner
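
# Illustrative sketch (not part of the generated contract): a minimal
# FlowSessionDto describing a compute-instance-backed session. All values
# below are placeholders.
#
#     session = FlowSessionDto(
#         session_id="00000000-0000-0000-0000-000000000000",
#         vm_size="STANDARD_DS11_V2",
#         max_idle_time_seconds=3600,
#         compute_instance_name="my-compute-instance",
#     )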


class FlowSnapshot(msrest.serialization.Model):
"""FlowSnapshot.
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.FlowInputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:ivar nodes:
:vartype nodes: list[~flow.models.FlowNode]
:ivar node_variants: This is a dictionary.
:vartype node_variants: dict[str, ~flow.models.FlowNodeVariant]
:ivar environment:
:vartype environment: ~flow.models.FlowEnvironment
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, any]
:ivar language: Possible values include: "Python", "CSharp".
:vartype language: str or ~flow.models.FlowLanguage
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '{FlowInputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{FlowOutputDefinition}'},
'nodes': {'key': 'nodes', 'type': '[FlowNode]'},
'node_variants': {'key': 'node_variants', 'type': '{FlowNodeVariant}'},
'environment': {'key': 'environment', 'type': 'FlowEnvironment'},
'environment_variables': {'key': 'environment_variables', 'type': '{object}'},
'language': {'key': 'language', 'type': 'str'},
}
def __init__(
self,
*,
inputs: Optional[Dict[str, "FlowInputDefinition"]] = None,
outputs: Optional[Dict[str, "FlowOutputDefinition"]] = None,
nodes: Optional[List["FlowNode"]] = None,
node_variants: Optional[Dict[str, "FlowNodeVariant"]] = None,
environment: Optional["FlowEnvironment"] = None,
environment_variables: Optional[Dict[str, Any]] = None,
language: Optional[Union[str, "FlowLanguage"]] = None,
**kwargs
):
"""
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.FlowInputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.FlowOutputDefinition]
:keyword nodes:
:paramtype nodes: list[~flow.models.FlowNode]
:keyword node_variants: This is a dictionary.
:paramtype node_variants: dict[str, ~flow.models.FlowNodeVariant]
:keyword environment:
:paramtype environment: ~flow.models.FlowEnvironment
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, any]
:keyword language: Possible values include: "Python", "CSharp".
:paramtype language: str or ~flow.models.FlowLanguage
"""
super(FlowSnapshot, self).__init__(**kwargs)
self.inputs = inputs
self.outputs = outputs
self.nodes = nodes
self.node_variants = node_variants
self.environment = environment
self.environment_variables = environment_variables
self.language = language
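
# Illustrative sketch: a small FlowSnapshot. ``FlowInputDefinition`` and
# ``FlowOutputDefinition`` are defined elsewhere in this module; the ``type``
# keyword used here is an assumption about their shape.
#
#     snapshot = FlowSnapshot(
#         inputs={"question": FlowInputDefinition(type="string")},
#         outputs={"answer": FlowOutputDefinition(type="string")},
#         environment_variables={"PF_LOGGING_LEVEL": "INFO"},
#         language="Python",
#     )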


class FlowSubmitRunSettings(msrest.serialization.Model):
"""FlowSubmitRunSettings.
:ivar node_inputs: This is a dictionary.
:vartype node_inputs: dict[str, any]
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest", "Eval",
"PairwiseEval".
:vartype run_mode: str or ~flow.models.FlowRunMode
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar tuning_node_names:
:vartype tuning_node_names: list[str]
:ivar tuning_node_settings: This is a dictionary.
:vartype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:ivar baseline_variant_id:
:vartype baseline_variant_id: str
:ivar default_variant_id:
:vartype default_variant_id: str
:ivar variants: This is a dictionary.
:vartype variants: dict[str, list[~flow.models.Node]]
:ivar variants_tools:
:vartype variants_tools: list[~flow.models.Tool]
:ivar variants_codes: This is a dictionary.
:vartype variants_codes: dict[str, str]
:ivar node_name:
:vartype node_name: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar evaluation_flow_run_settings: This is a dictionary.
:vartype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar data_inputs: This is a dictionary.
:vartype data_inputs: dict[str, str]
:ivar bulk_test_flow_id:
:vartype bulk_test_flow_id: str
:ivar bulk_test_flow_run_ids:
:vartype bulk_test_flow_run_ids: list[str]
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar runtime_name:
:vartype runtime_name: str
:ivar flow_run_output_directory:
:vartype flow_run_output_directory: str
"""
_attribute_map = {
'node_inputs': {'key': 'nodeInputs', 'type': '{object}'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'run_mode': {'key': 'runMode', 'type': 'str'},
'batch_inputs': {'key': 'batch_inputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'tuning_node_names': {'key': 'tuningNodeNames', 'type': '[str]'},
'tuning_node_settings': {'key': 'tuningNodeSettings', 'type': '{TuningNodeSetting}'},
'baseline_variant_id': {'key': 'baselineVariantId', 'type': 'str'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
'variants': {'key': 'variants', 'type': '{[Node]}'},
'variants_tools': {'key': 'variantsTools', 'type': '[Tool]'},
'variants_codes': {'key': 'variantsCodes', 'type': '{str}'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'evaluation_flow_run_settings': {'key': 'evaluationFlowRunSettings', 'type': '{EvaluationFlowRunSettings}'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'data_inputs': {'key': 'dataInputs', 'type': '{str}'},
'bulk_test_flow_id': {'key': 'bulkTestFlowId', 'type': 'str'},
'bulk_test_flow_run_ids': {'key': 'bulkTestFlowRunIds', 'type': '[str]'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'flow_run_output_directory': {'key': 'flowRunOutputDirectory', 'type': 'str'},
}
def __init__(
self,
*,
node_inputs: Optional[Dict[str, Any]] = None,
flow_run_display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
run_mode: Optional[Union[str, "FlowRunMode"]] = None,
batch_inputs: Optional[List[Dict[str, Any]]] = None,
batch_data_input: Optional["BatchDataInput"] = None,
tuning_node_names: Optional[List[str]] = None,
tuning_node_settings: Optional[Dict[str, "TuningNodeSetting"]] = None,
baseline_variant_id: Optional[str] = None,
default_variant_id: Optional[str] = None,
variants: Optional[Dict[str, List["Node"]]] = None,
variants_tools: Optional[List["Tool"]] = None,
variants_codes: Optional[Dict[str, str]] = None,
node_name: Optional[str] = None,
bulk_test_id: Optional[str] = None,
evaluation_flow_run_settings: Optional[Dict[str, "EvaluationFlowRunSettings"]] = None,
inputs_mapping: Optional[Dict[str, str]] = None,
data_inputs: Optional[Dict[str, str]] = None,
bulk_test_flow_id: Optional[str] = None,
bulk_test_flow_run_ids: Optional[List[str]] = None,
aml_compute_name: Optional[str] = None,
runtime_name: Optional[str] = None,
flow_run_output_directory: Optional[str] = None,
**kwargs
):
"""
:keyword node_inputs: This is a dictionary.
:paramtype node_inputs: dict[str, any]
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword run_mode: Possible values include: "Flow", "SingleNode", "FromNode", "BulkTest",
"Eval", "PairwiseEval".
:paramtype run_mode: str or ~flow.models.FlowRunMode
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword tuning_node_names:
:paramtype tuning_node_names: list[str]
:keyword tuning_node_settings: This is a dictionary.
:paramtype tuning_node_settings: dict[str, ~flow.models.TuningNodeSetting]
:keyword baseline_variant_id:
:paramtype baseline_variant_id: str
:keyword default_variant_id:
:paramtype default_variant_id: str
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, list[~flow.models.Node]]
:keyword variants_tools:
:paramtype variants_tools: list[~flow.models.Tool]
:keyword variants_codes: This is a dictionary.
:paramtype variants_codes: dict[str, str]
:keyword node_name:
:paramtype node_name: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword evaluation_flow_run_settings: This is a dictionary.
:paramtype evaluation_flow_run_settings: dict[str, ~flow.models.EvaluationFlowRunSettings]
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword data_inputs: This is a dictionary.
:paramtype data_inputs: dict[str, str]
:keyword bulk_test_flow_id:
:paramtype bulk_test_flow_id: str
:keyword bulk_test_flow_run_ids:
:paramtype bulk_test_flow_run_ids: list[str]
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword runtime_name:
:paramtype runtime_name: str
:keyword flow_run_output_directory:
:paramtype flow_run_output_directory: str
"""
super(FlowSubmitRunSettings, self).__init__(**kwargs)
self.node_inputs = node_inputs
self.flow_run_display_name = flow_run_display_name
self.description = description
self.tags = tags
self.properties = properties
self.run_mode = run_mode
self.batch_inputs = batch_inputs
self.batch_data_input = batch_data_input
self.tuning_node_names = tuning_node_names
self.tuning_node_settings = tuning_node_settings
self.baseline_variant_id = baseline_variant_id
self.default_variant_id = default_variant_id
self.variants = variants
self.variants_tools = variants_tools
self.variants_codes = variants_codes
self.node_name = node_name
self.bulk_test_id = bulk_test_id
self.evaluation_flow_run_settings = evaluation_flow_run_settings
self.inputs_mapping = inputs_mapping
self.data_inputs = data_inputs
self.bulk_test_flow_id = bulk_test_flow_id
self.bulk_test_flow_run_ids = bulk_test_flow_run_ids
self.aml_compute_name = aml_compute_name
self.runtime_name = runtime_name
self.flow_run_output_directory = flow_run_output_directory
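
# Illustrative sketch: submit settings for a bulk-test style run, feeding two
# input rows directly via ``batch_inputs`` and mapping them onto flow inputs.
# The display name, mapping, and runtime name are placeholders.
#
#     settings = FlowSubmitRunSettings(
#         flow_run_display_name="my-bulk-test",
#         run_mode="BulkTest",
#         batch_inputs=[{"question": "What is 2+2?"}, {"question": "Hi"}],
#         inputs_mapping={"question": "${data.question}"},
#         runtime_name="my-runtime",
#     )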


class FlowTestInfo(msrest.serialization.Model):
"""FlowTestInfo.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_test_storage_setting:
:vartype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_test_storage_setting': {'key': 'flowTestStorageSetting', 'type': 'FlowTestStorageSetting'},
}
def __init__(
self,
*,
flow_run_id: Optional[str] = None,
flow_test_storage_setting: Optional["FlowTestStorageSetting"] = None,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_test_storage_setting:
:paramtype flow_test_storage_setting: ~flow.models.FlowTestStorageSetting
"""
super(FlowTestInfo, self).__init__(**kwargs)
self.flow_run_id = flow_run_id
self.flow_test_storage_setting = flow_test_storage_setting


class FlowTestStorageSetting(msrest.serialization.Model):
"""FlowTestStorageSetting.
:ivar storage_account_name:
:vartype storage_account_name: str
:ivar blob_container_name:
:vartype blob_container_name: str
:ivar flow_artifacts_root_path:
:vartype flow_artifacts_root_path: str
:ivar output_datastore_name:
:vartype output_datastore_name: str
"""
_attribute_map = {
'storage_account_name': {'key': 'storageAccountName', 'type': 'str'},
'blob_container_name': {'key': 'blobContainerName', 'type': 'str'},
'flow_artifacts_root_path': {'key': 'flowArtifactsRootPath', 'type': 'str'},
'output_datastore_name': {'key': 'outputDatastoreName', 'type': 'str'},
}
def __init__(
self,
*,
storage_account_name: Optional[str] = None,
blob_container_name: Optional[str] = None,
flow_artifacts_root_path: Optional[str] = None,
output_datastore_name: Optional[str] = None,
**kwargs
):
"""
:keyword storage_account_name:
:paramtype storage_account_name: str
:keyword blob_container_name:
:paramtype blob_container_name: str
:keyword flow_artifacts_root_path:
:paramtype flow_artifacts_root_path: str
:keyword output_datastore_name:
:paramtype output_datastore_name: str
"""
super(FlowTestStorageSetting, self).__init__(**kwargs)
self.storage_account_name = storage_account_name
self.blob_container_name = blob_container_name
self.flow_artifacts_root_path = flow_artifacts_root_path
self.output_datastore_name = output_datastore_name
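
# Illustrative sketch: pairing a storage setting with the FlowTestInfo model
# defined above; account, container, and path are placeholders.
#
#     storage = FlowTestStorageSetting(
#         storage_account_name="mystorageaccount",
#         blob_container_name="flow-artifacts",
#         flow_artifacts_root_path="promptflow/runs",
#     )
#     info = FlowTestInfo(
#         flow_run_id="example-run-id",
#         flow_test_storage_setting=storage,
#     )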


class FlowToolsDto(msrest.serialization.Model):
"""FlowToolsDto.
:ivar package: This is a dictionary.
:vartype package: dict[str, ~flow.models.Tool]
:ivar code: This is a dictionary.
:vartype code: dict[str, ~flow.models.Tool]
:ivar errors: This is a dictionary.
:vartype errors: dict[str, ~flow.models.ErrorResponse]
"""
_attribute_map = {
'package': {'key': 'package', 'type': '{Tool}'},
'code': {'key': 'code', 'type': '{Tool}'},
'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
}
def __init__(
self,
*,
package: Optional[Dict[str, "Tool"]] = None,
code: Optional[Dict[str, "Tool"]] = None,
errors: Optional[Dict[str, "ErrorResponse"]] = None,
**kwargs
):
"""
:keyword package: This is a dictionary.
:paramtype package: dict[str, ~flow.models.Tool]
:keyword code: This is a dictionary.
:paramtype code: dict[str, ~flow.models.Tool]
:keyword errors: This is a dictionary.
:paramtype errors: dict[str, ~flow.models.ErrorResponse]
"""
super(FlowToolsDto, self).__init__(**kwargs)
self.package = package
self.code = code
self.errors = errors
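
# Illustrative sketch: inspecting a deserialized FlowToolsDto, e.g. surfacing
# per-tool errors returned by the service.
#
#     for tool_name, error in (tools_dto.errors or {}).items():
#         print(f"tool {tool_name} failed: {error}")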


class FlowToolSettingParameter(msrest.serialization.Model):
"""FlowToolSettingParameter.
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar default:
:vartype default: str
:ivar advanced:
:vartype advanced: bool
:ivar enum:
:vartype enum: list[any]
:ivar model_list:
:vartype model_list: list[str]
:ivar text_box_size:
:vartype text_box_size: int
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar allow_manual_entry:
:vartype allow_manual_entry: bool
"""
_attribute_map = {
'type': {'key': 'type', 'type': '[str]'},
'default': {'key': 'default', 'type': 'str'},
'advanced': {'key': 'advanced', 'type': 'bool'},
'enum': {'key': 'enum', 'type': '[object]'},
'model_list': {'key': 'model_list', 'type': '[str]'},
'text_box_size': {'key': 'text_box_size', 'type': 'int'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
}
def __init__(
self,
*,
type: Optional[List[Union[str, "ValueType"]]] = None,
default: Optional[str] = None,
advanced: Optional[bool] = None,
enum: Optional[List[Any]] = None,
model_list: Optional[List[str]] = None,
text_box_size: Optional[int] = None,
capabilities: Optional["AzureOpenAIModelCapabilities"] = None,
allow_manual_entry: Optional[bool] = None,
**kwargs
):
"""
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword default:
:paramtype default: str
:keyword advanced:
:paramtype advanced: bool
:keyword enum:
:paramtype enum: list[any]
:keyword model_list:
:paramtype model_list: list[str]
:keyword text_box_size:
:paramtype text_box_size: int
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword allow_manual_entry:
:paramtype allow_manual_entry: bool
"""
super(FlowToolSettingParameter, self).__init__(**kwargs)
self.type = type
self.default = default
self.advanced = advanced
self.enum = enum
self.model_list = model_list
self.text_box_size = text_box_size
self.capabilities = capabilities
self.allow_manual_entry = allow_manual_entry
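
# Illustrative sketch: a string-typed tool setting rendered as a dropdown that
# still allows manual entry; the "string" literal assumes the corresponding
# ValueType enum value, and the model names are placeholders.
#
#     parameter = FlowToolSettingParameter(
#         type=["string"],
#         default="gpt-35-turbo",
#         enum=["gpt-35-turbo", "gpt-4"],
#         allow_manual_entry=True,
#     )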


class FlowVariantNode(msrest.serialization.Model):
"""FlowVariantNode.
:ivar node:
:vartype node: ~flow.models.FlowNode
:ivar description:
:vartype description: str
"""
_attribute_map = {
'node': {'key': 'node', 'type': 'FlowNode'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
node: Optional["FlowNode"] = None,
description: Optional[str] = None,
**kwargs
):
"""
:keyword node:
:paramtype node: ~flow.models.FlowNode
:keyword description:
:paramtype description: str
"""
super(FlowVariantNode, self).__init__(**kwargs)
self.node = node
self.description = description


class ForecastHorizon(msrest.serialization.Model):
"""ForecastHorizon.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.ForecastHorizonMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "ForecastHorizonMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.ForecastHorizonMode
:keyword value:
:paramtype value: int
"""
super(ForecastHorizon, self).__init__(**kwargs)
self.mode = mode
self.value = value
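
# Illustrative sketch: a fixed, user-specified horizon of 14 periods.
#
#     horizon = ForecastHorizon(mode="Custom", value=14)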


class ForecastingSettings(msrest.serialization.Model):
"""ForecastingSettings.
:ivar country_or_region_for_holidays:
:vartype country_or_region_for_holidays: str
:ivar time_column_name:
:vartype time_column_name: str
:ivar target_lags:
:vartype target_lags: ~flow.models.TargetLags
:ivar target_rolling_window_size:
:vartype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
:ivar forecast_horizon:
:vartype forecast_horizon: ~flow.models.ForecastHorizon
:ivar time_series_id_column_names:
:vartype time_series_id_column_names: list[str]
:ivar frequency:
:vartype frequency: str
:ivar feature_lags:
:vartype feature_lags: str
:ivar seasonality:
:vartype seasonality: ~flow.models.Seasonality
:ivar short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:vartype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
:ivar use_stl: Possible values include: "Season", "SeasonTrend".
:vartype use_stl: str or ~flow.models.UseStl
:ivar target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:vartype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
:ivar cv_step_size:
:vartype cv_step_size: int
:ivar features_unknown_at_forecast_time:
:vartype features_unknown_at_forecast_time: list[str]
"""
_attribute_map = {
'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
'target_lags': {'key': 'targetLags', 'type': 'TargetLags'},
'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'},
'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'},
'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
'frequency': {'key': 'frequency', 'type': 'str'},
'feature_lags': {'key': 'featureLags', 'type': 'str'},
'seasonality': {'key': 'seasonality', 'type': 'Seasonality'},
'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
'use_stl': {'key': 'useStl', 'type': 'str'},
'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'},
}
def __init__(
self,
*,
country_or_region_for_holidays: Optional[str] = None,
time_column_name: Optional[str] = None,
target_lags: Optional["TargetLags"] = None,
target_rolling_window_size: Optional["TargetRollingWindowSize"] = None,
forecast_horizon: Optional["ForecastHorizon"] = None,
time_series_id_column_names: Optional[List[str]] = None,
frequency: Optional[str] = None,
feature_lags: Optional[str] = None,
seasonality: Optional["Seasonality"] = None,
short_series_handling_config: Optional[Union[str, "ShortSeriesHandlingConfiguration"]] = None,
use_stl: Optional[Union[str, "UseStl"]] = None,
target_aggregate_function: Optional[Union[str, "TargetAggregationFunction"]] = None,
cv_step_size: Optional[int] = None,
features_unknown_at_forecast_time: Optional[List[str]] = None,
**kwargs
):
"""
:keyword country_or_region_for_holidays:
:paramtype country_or_region_for_holidays: str
:keyword time_column_name:
:paramtype time_column_name: str
:keyword target_lags:
:paramtype target_lags: ~flow.models.TargetLags
:keyword target_rolling_window_size:
:paramtype target_rolling_window_size: ~flow.models.TargetRollingWindowSize
:keyword forecast_horizon:
:paramtype forecast_horizon: ~flow.models.ForecastHorizon
:keyword time_series_id_column_names:
:paramtype time_series_id_column_names: list[str]
:keyword frequency:
:paramtype frequency: str
:keyword feature_lags:
:paramtype feature_lags: str
:keyword seasonality:
:paramtype seasonality: ~flow.models.Seasonality
:keyword short_series_handling_config: Possible values include: "Auto", "Pad", "Drop".
:paramtype short_series_handling_config: str or ~flow.models.ShortSeriesHandlingConfiguration
:keyword use_stl: Possible values include: "Season", "SeasonTrend".
:paramtype use_stl: str or ~flow.models.UseStl
:keyword target_aggregate_function: Possible values include: "Sum", "Max", "Min", "Mean".
:paramtype target_aggregate_function: str or ~flow.models.TargetAggregationFunction
:keyword cv_step_size:
:paramtype cv_step_size: int
:keyword features_unknown_at_forecast_time:
:paramtype features_unknown_at_forecast_time: list[str]
"""
super(ForecastingSettings, self).__init__(**kwargs)
self.country_or_region_for_holidays = country_or_region_for_holidays
self.time_column_name = time_column_name
self.target_lags = target_lags
self.target_rolling_window_size = target_rolling_window_size
self.forecast_horizon = forecast_horizon
self.time_series_id_column_names = time_series_id_column_names
self.frequency = frequency
self.feature_lags = feature_lags
self.seasonality = seasonality
self.short_series_handling_config = short_series_handling_config
self.use_stl = use_stl
self.target_aggregate_function = target_aggregate_function
self.cv_step_size = cv_step_size
self.features_unknown_at_forecast_time = features_unknown_at_forecast_time
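
# Illustrative sketch: daily forecasting settings reusing the ForecastHorizon
# model defined above; the column names are placeholders.
#
#     forecasting = ForecastingSettings(
#         time_column_name="date",
#         frequency="D",
#         forecast_horizon=ForecastHorizon(mode="Custom", value=14),
#         time_series_id_column_names=["store_id"],
#     )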


class GeneralSettings(msrest.serialization.Model):
"""GeneralSettings.
:ivar primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:vartype primary_metric: str or ~flow.models.PrimaryMetrics
:ivar task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:vartype task_type: str or ~flow.models.TaskType
:ivar log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:vartype log_verbosity: str or ~flow.models.LogVerbosity
"""
_attribute_map = {
'primary_metric': {'key': 'primaryMetric', 'type': 'str'},
'task_type': {'key': 'taskType', 'type': 'str'},
'log_verbosity': {'key': 'logVerbosity', 'type': 'str'},
}
def __init__(
self,
*,
primary_metric: Optional[Union[str, "PrimaryMetrics"]] = None,
task_type: Optional[Union[str, "TaskType"]] = None,
log_verbosity: Optional[Union[str, "LogVerbosity"]] = None,
**kwargs
):
"""
:keyword primary_metric: Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall",
"AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "SpearmanCorrelation",
"NormalizedRootMeanSquaredError", "R2Score", "NormalizedMeanAbsoluteError",
"NormalizedRootMeanSquaredLogError", "MeanAveragePrecision", "Iou".
:paramtype primary_metric: str or ~flow.models.PrimaryMetrics
:keyword task_type: Possible values include: "Classification", "Regression", "Forecasting",
"ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection",
"ImageInstanceSegmentation", "TextClassification", "TextMultiLabeling", "TextNER",
"TextClassificationMultilabel".
:paramtype task_type: str or ~flow.models.TaskType
:keyword log_verbosity: Possible values include: "NotSet", "Debug", "Info", "Warning", "Error",
"Critical".
:paramtype log_verbosity: str or ~flow.models.LogVerbosity
"""
super(GeneralSettings, self).__init__(**kwargs)
self.primary_metric = primary_metric
self.task_type = task_type
self.log_verbosity = log_verbosity
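
# Illustrative sketch: general AutoML settings for a forecasting task, using
# enum literals listed in the docstring above.
#
#     general = GeneralSettings(
#         task_type="Forecasting",
#         primary_metric="NormalizedRootMeanSquaredError",
#         log_verbosity="Info",
#     )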


class GeneratePipelineComponentRequest(msrest.serialization.Model):
"""GeneratePipelineComponentRequest.
:ivar name:
:vartype name: str
:ivar display_name:
:vartype display_name: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar category:
:vartype category: str
:ivar version:
:vartype version: str
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar registry_name:
:vartype registry_name: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
module_scope: Optional[Union[str, "ModuleScope"]] = None,
is_deterministic: Optional[bool] = None,
category: Optional[str] = None,
version: Optional[str] = None,
set_as_default_version: Optional[bool] = None,
registry_name: Optional[str] = None,
graph: Optional["GraphDraftEntity"] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
tags: Optional[Dict[str, str]] = None,
continue_run_on_step_failure: Optional[bool] = None,
description: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
enforce_rerun: Optional[bool] = None,
dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword display_name:
:paramtype display_name: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword category:
:paramtype category: str
:keyword version:
:paramtype version: str
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword registry_name:
:paramtype registry_name: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(GeneratePipelineComponentRequest, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
self.module_scope = module_scope
self.is_deterministic = is_deterministic
self.category = category
self.version = version
self.set_as_default_version = set_as_default_version
self.registry_name = registry_name
self.graph = graph
self.pipeline_run_settings = pipeline_run_settings
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.tags = tags
self.continue_run_on_step_failure = continue_run_on_step_failure
self.description = description
self.properties = properties
self.enforce_rerun = enforce_rerun
self.dataset_access_modes = dataset_access_modes
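
# Illustrative sketch: a minimal registration request. The graph payload is
# omitted here; it would normally be a GraphDraftEntity (defined below), and
# all values shown are placeholders.
#
#     request = GeneratePipelineComponentRequest(
#         name="my_component",
#         display_name="My Component",
#         version="1",
#         module_scope="Workspace",
#         tags={"team": "example"},
#     )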


class GenerateToolMetaRequest(msrest.serialization.Model):
"""GenerateToolMetaRequest.
:ivar tools: This is a dictionary.
:vartype tools: dict[str, ~flow.models.ToolSourceMeta]
:ivar working_dir:
:vartype working_dir: str
"""
_attribute_map = {
'tools': {'key': 'tools', 'type': '{ToolSourceMeta}'},
'working_dir': {'key': 'working_dir', 'type': 'str'},
}
def __init__(
self,
*,
tools: Optional[Dict[str, "ToolSourceMeta"]] = None,
working_dir: Optional[str] = None,
**kwargs
):
"""
:keyword tools: This is a dictionary.
:paramtype tools: dict[str, ~flow.models.ToolSourceMeta]
:keyword working_dir:
:paramtype working_dir: str
"""
super(GenerateToolMetaRequest, self).__init__(**kwargs)
self.tools = tools
self.working_dir = working_dir
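
# Illustrative sketch: requesting tool metadata for one source file. The
# ``tool_type`` keyword assumes the shape of ToolSourceMeta, which is defined
# elsewhere in this module; the paths are placeholders.
#
#     request = GenerateToolMetaRequest(
#         tools={"hello.py": ToolSourceMeta(tool_type="python")},
#         working_dir="/path/to/flow",
#     )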


class GetDynamicListRequest(msrest.serialization.Model):
"""GetDynamicListRequest.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs: This is a dictionary.
:vartype func_kwargs: dict[str, any]
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
}
def __init__(
self,
*,
func_path: Optional[str] = None,
func_kwargs: Optional[Dict[str, Any]] = None,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs: This is a dictionary.
:paramtype func_kwargs: dict[str, any]
"""
super(GetDynamicListRequest, self).__init__(**kwargs)
self.func_path = func_path
self.func_kwargs = func_kwargs
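
# Illustrative sketch: asking the service to evaluate a dynamic-list function
# with keyword arguments; the function path and kwargs are placeholders.
#
#     request = GetDynamicListRequest(
#         func_path="my_package.my_module.list_deployments",
#         func_kwargs={"subscription_id": "<sub-id>"},
#     )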


class GetRunDataResultDto(msrest.serialization.Model):
"""GetRunDataResultDto.
:ivar run_metadata:
:vartype run_metadata: ~flow.models.RunDto
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar job_specification: Anything.
:vartype job_specification: any
:ivar system_settings: Dictionary of :code:`<string>`.
:vartype system_settings: dict[str, str]
"""
_attribute_map = {
'run_metadata': {'key': 'runMetadata', 'type': 'RunDto'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'job_specification': {'key': 'jobSpecification', 'type': 'object'},
'system_settings': {'key': 'systemSettings', 'type': '{str}'},
}
def __init__(
self,
*,
run_metadata: Optional["RunDto"] = None,
run_definition: Optional[Any] = None,
job_specification: Optional[Any] = None,
system_settings: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword run_metadata:
:paramtype run_metadata: ~flow.models.RunDto
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword job_specification: Anything.
:paramtype job_specification: any
:keyword system_settings: Dictionary of :code:`<string>`.
:paramtype system_settings: dict[str, str]
"""
super(GetRunDataResultDto, self).__init__(**kwargs)
self.run_metadata = run_metadata
self.run_definition = run_definition
self.job_specification = job_specification
self.system_settings = system_settings
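
# Illustrative sketch: reading a deserialized GetRunDataResultDto (however it
# was obtained); attribute names mirror the attribute map above.
#
#     if result.run_metadata is not None:
#         run_id = result.run_metadata.run_id  # RunDto field, an assumption
#     system_settings = result.system_settings or {}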


class GetTrainingSessionDto(msrest.serialization.Model):
"""GetTrainingSessionDto.
:ivar properties:
:vartype properties: ~flow.models.SessionProperties
:ivar compute:
:vartype compute: ~flow.models.ComputeContract
"""
_attribute_map = {
'properties': {'key': 'properties', 'type': 'SessionProperties'},
'compute': {'key': 'compute', 'type': 'ComputeContract'},
}
def __init__(
self,
*,
properties: Optional["SessionProperties"] = None,
compute: Optional["ComputeContract"] = None,
**kwargs
):
"""
:keyword properties:
:paramtype properties: ~flow.models.SessionProperties
:keyword compute:
:paramtype compute: ~flow.models.ComputeContract
"""
super(GetTrainingSessionDto, self).__init__(**kwargs)
self.properties = properties
self.compute = compute


class GlobalJobDispatcherConfiguration(msrest.serialization.Model):
"""GlobalJobDispatcherConfiguration.
:ivar vm_size:
:vartype vm_size: list[str]
:ivar compute_type: Possible values include: "AmlCompute", "AmlK8s".
:vartype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
:ivar region:
:vartype region: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar redispatch_allowed:
:vartype redispatch_allowed: bool
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar vc_list:
:vartype vc_list: list[str]
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar vc_block_list:
:vartype vc_block_list: list[str]
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
"""
_attribute_map = {
'vm_size': {'key': 'vmSize', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'redispatch_allowed': {'key': 'redispatchAllowed', 'type': 'bool'},
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'vc_list': {'key': 'vcList', 'type': '[str]'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
}
def __init__(
self,
*,
vm_size: Optional[List[str]] = None,
compute_type: Optional[Union[str, "GlobalJobDispatcherSupportedComputeType"]] = None,
region: Optional[List[str]] = None,
my_resource_only: Optional[bool] = None,
redispatch_allowed: Optional[bool] = None,
low_priority_vm_tolerant: Optional[bool] = None,
vc_list: Optional[List[str]] = None,
plan_id: Optional[str] = None,
plan_region_id: Optional[str] = None,
vc_block_list: Optional[List[str]] = None,
cluster_block_list: Optional[List[str]] = None,
**kwargs
):
"""
:keyword vm_size:
:paramtype vm_size: list[str]
:keyword compute_type: Possible values include: "AmlCompute", "AmlK8s".
:paramtype compute_type: str or ~flow.models.GlobalJobDispatcherSupportedComputeType
:keyword region:
:paramtype region: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword redispatch_allowed:
:paramtype redispatch_allowed: bool
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword vc_list:
:paramtype vc_list: list[str]
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
"""
super(GlobalJobDispatcherConfiguration, self).__init__(**kwargs)
self.vm_size = vm_size
self.compute_type = compute_type
self.region = region
self.my_resource_only = my_resource_only
self.redispatch_allowed = redispatch_allowed
self.low_priority_vm_tolerant = low_priority_vm_tolerant
self.vc_list = vc_list
self.plan_id = plan_id
self.plan_region_id = plan_region_id
self.vc_block_list = vc_block_list
self.cluster_block_list = cluster_block_list
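
# Illustrative sketch: a dispatcher configuration constrained to AmlCompute in
# two regions; VM sizes and regions are placeholders.
#
#     dispatcher = GlobalJobDispatcherConfiguration(
#         compute_type="AmlCompute",
#         vm_size=["STANDARD_D2_V2", "STANDARD_DS11_V2"],
#         region=["eastus", "westus2"],
#         low_priority_vm_tolerant=True,
#     )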


class GlobsOptions(msrest.serialization.Model):
"""GlobsOptions.
:ivar glob_patterns:
:vartype glob_patterns: list[str]
"""
_attribute_map = {
'glob_patterns': {'key': 'globPatterns', 'type': '[str]'},
}
def __init__(
self,
*,
glob_patterns: Optional[List[str]] = None,
**kwargs
):
"""
:keyword glob_patterns:
:paramtype glob_patterns: list[str]
"""
super(GlobsOptions, self).__init__(**kwargs)
self.glob_patterns = glob_patterns


class GraphAnnotationNode(msrest.serialization.Model):
"""GraphAnnotationNode.
:ivar id:
:vartype id: str
:ivar content:
:vartype content: str
:ivar mentioned_node_names:
:vartype mentioned_node_names: list[str]
:ivar structured_content:
:vartype structured_content: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'content': {'key': 'content', 'type': 'str'},
'mentioned_node_names': {'key': 'mentionedNodeNames', 'type': '[str]'},
'structured_content': {'key': 'structuredContent', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
content: Optional[str] = None,
mentioned_node_names: Optional[List[str]] = None,
structured_content: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword content:
:paramtype content: str
:keyword mentioned_node_names:
:paramtype mentioned_node_names: list[str]
:keyword structured_content:
:paramtype structured_content: str
"""
super(GraphAnnotationNode, self).__init__(**kwargs)
self.id = id
self.content = content
self.mentioned_node_names = mentioned_node_names
self.structured_content = structured_content


class GraphControlNode(msrest.serialization.Model):
"""GraphControlNode.
:ivar id:
:vartype id: str
:ivar control_type: The only acceptable values to pass in are None and "IfElse". The default
value is None.
:vartype control_type: str
:ivar control_parameter:
:vartype control_parameter: ~flow.models.ParameterAssignment
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'control_type': {'key': 'controlType', 'type': 'str'},
'control_parameter': {'key': 'controlParameter', 'type': 'ParameterAssignment'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
control_type: Optional[str] = None,
control_parameter: Optional["ParameterAssignment"] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword control_type: The only acceptable values to pass in are None and "IfElse". The
default value is None.
:paramtype control_type: str
:keyword control_parameter:
:paramtype control_parameter: ~flow.models.ParameterAssignment
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphControlNode, self).__init__(**kwargs)
self.id = id
self.control_type = control_type
self.control_parameter = control_parameter
self.run_attribution = run_attribution
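
# Illustrative sketch: an if-else control node. Per the docstring above,
# ``control_type`` accepts only None or "IfElse"; the ParameterAssignment
# keyword shape is an assumption about that model.
#
#     node = GraphControlNode(
#         id="control-1",
#         control_type="IfElse",
#         control_parameter=ParameterAssignment(name="condition"),
#     )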


class GraphControlReferenceNode(msrest.serialization.Model):
"""GraphControlReferenceNode.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar comment:
:vartype comment: str
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.ControlFlowType
:ivar reference_node_id:
:vartype reference_node_id: str
:ivar do_while_control_flow_info:
:vartype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
:ivar parallel_for_control_flow_info:
:vartype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'reference_node_id': {'key': 'referenceNodeId', 'type': 'str'},
'do_while_control_flow_info': {'key': 'doWhileControlFlowInfo', 'type': 'DoWhileControlFlowInfo'},
'parallel_for_control_flow_info': {'key': 'parallelForControlFlowInfo', 'type': 'ParallelForControlFlowInfo'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
comment: Optional[str] = None,
control_flow_type: Optional[Union[str, "ControlFlowType"]] = None,
reference_node_id: Optional[str] = None,
do_while_control_flow_info: Optional["DoWhileControlFlowInfo"] = None,
parallel_for_control_flow_info: Optional["ParallelForControlFlowInfo"] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword comment:
:paramtype comment: str
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.ControlFlowType
:keyword reference_node_id:
:paramtype reference_node_id: str
:keyword do_while_control_flow_info:
:paramtype do_while_control_flow_info: ~flow.models.DoWhileControlFlowInfo
:keyword parallel_for_control_flow_info:
:paramtype parallel_for_control_flow_info: ~flow.models.ParallelForControlFlowInfo
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphControlReferenceNode, self).__init__(**kwargs)
self.id = id
self.name = name
self.comment = comment
self.control_flow_type = control_flow_type
self.reference_node_id = reference_node_id
self.do_while_control_flow_info = do_while_control_flow_info
self.parallel_for_control_flow_info = parallel_for_control_flow_info
self.run_attribution = run_attribution


class GraphDatasetNode(msrest.serialization.Model):
"""GraphDatasetNode.
:ivar id:
:vartype id: str
:ivar dataset_id:
:vartype dataset_id: str
:ivar data_path_parameter_name:
:vartype data_path_parameter_name: str
:ivar data_set_definition:
:vartype data_set_definition: ~flow.models.DataSetDefinition
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'data_path_parameter_name': {'key': 'dataPathParameterName', 'type': 'str'},
'data_set_definition': {'key': 'dataSetDefinition', 'type': 'DataSetDefinition'},
}
def __init__(
self,
*,
id: Optional[str] = None,
dataset_id: Optional[str] = None,
data_path_parameter_name: Optional[str] = None,
data_set_definition: Optional["DataSetDefinition"] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword dataset_id:
:paramtype dataset_id: str
:keyword data_path_parameter_name:
:paramtype data_path_parameter_name: str
:keyword data_set_definition:
:paramtype data_set_definition: ~flow.models.DataSetDefinition
"""
super(GraphDatasetNode, self).__init__(**kwargs)
self.id = id
self.dataset_id = dataset_id
self.data_path_parameter_name = data_path_parameter_name
self.data_set_definition = data_set_definition


class GraphDraftEntity(msrest.serialization.Model):
"""GraphDraftEntity.
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.GraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.GraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.GraphEdge]
:ivar entity_interface:
:vartype entity_interface: ~flow.models.EntityInterface
:ivar graph_layout:
:vartype graph_layout: ~flow.models.GraphLayout
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar extended_properties: This is a dictionary.
:vartype extended_properties: dict[str, str]
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
'edges': {'key': 'edges', 'type': '[GraphEdge]'},
'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
module_nodes: Optional[List["GraphModuleNode"]] = None,
dataset_nodes: Optional[List["GraphDatasetNode"]] = None,
sub_graph_nodes: Optional[List["GraphReferenceNode"]] = None,
control_reference_nodes: Optional[List["GraphControlReferenceNode"]] = None,
control_nodes: Optional[List["GraphControlNode"]] = None,
edges: Optional[List["GraphEdge"]] = None,
entity_interface: Optional["EntityInterface"] = None,
graph_layout: Optional["GraphLayout"] = None,
created_by: Optional["CreatedBy"] = None,
last_updated_by: Optional["CreatedBy"] = None,
default_compute: Optional["ComputeSetting"] = None,
default_datastore: Optional["DatastoreSetting"] = None,
default_cloud_priority: Optional["CloudPrioritySetting"] = None,
extended_properties: Optional[Dict[str, str]] = None,
parent_sub_graph_module_ids: Optional[List[str]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.GraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.GraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.GraphEdge]
:keyword entity_interface:
:paramtype entity_interface: ~flow.models.EntityInterface
:keyword graph_layout:
:paramtype graph_layout: ~flow.models.GraphLayout
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword extended_properties: This is a dictionary.
:paramtype extended_properties: dict[str, str]
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(GraphDraftEntity, self).__init__(**kwargs)
self.module_nodes = module_nodes
self.dataset_nodes = dataset_nodes
self.sub_graph_nodes = sub_graph_nodes
self.control_reference_nodes = control_reference_nodes
self.control_nodes = control_nodes
self.edges = edges
self.entity_interface = entity_interface
self.graph_layout = graph_layout
self.created_by = created_by
self.last_updated_by = last_updated_by
self.default_compute = default_compute
self.default_datastore = default_datastore
self.default_cloud_priority = default_cloud_priority
self.extended_properties = extended_properties
self.parent_sub_graph_module_ids = parent_sub_graph_module_ids
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date


class GraphEdge(msrest.serialization.Model):
"""GraphEdge.
:ivar source_output_port:
:vartype source_output_port: ~flow.models.PortInfo
:ivar destination_input_port:
:vartype destination_input_port: ~flow.models.PortInfo
"""
_attribute_map = {
'source_output_port': {'key': 'sourceOutputPort', 'type': 'PortInfo'},
'destination_input_port': {'key': 'destinationInputPort', 'type': 'PortInfo'},
}
def __init__(
self,
*,
source_output_port: Optional["PortInfo"] = None,
destination_input_port: Optional["PortInfo"] = None,
**kwargs
):
"""
:keyword source_output_port:
:paramtype source_output_port: ~flow.models.PortInfo
:keyword destination_input_port:
:paramtype destination_input_port: ~flow.models.PortInfo
"""
super(GraphEdge, self).__init__(**kwargs)
self.source_output_port = source_output_port
self.destination_input_port = destination_input_port
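
# Illustrative sketch: connecting one node's output port to another's input.
# The PortInfo keywords (``node_id``, ``port_name``) are assumptions about
# that model's shape; the ids are placeholders.
#
#     edge = GraphEdge(
#         source_output_port=PortInfo(node_id="node-a", port_name="output"),
#         destination_input_port=PortInfo(node_id="node-b", port_name="input"),
#     )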


class GraphLayout(msrest.serialization.Model):
"""GraphLayout.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.NodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
node_layouts: Optional[Dict[str, "NodeLayout"]] = None,
extended_data: Optional[str] = None,
annotation_nodes: Optional[List["GraphAnnotationNode"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(GraphLayout, self).__init__(**kwargs)
self.node_layouts = node_layouts
self.extended_data = extended_data
self.annotation_nodes = annotation_nodes
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date


class GraphLayoutCreationInfo(msrest.serialization.Model):
"""GraphLayoutCreationInfo.
:ivar node_layouts: This is a dictionary.
:vartype node_layouts: dict[str, ~flow.models.NodeLayout]
:ivar extended_data:
:vartype extended_data: str
:ivar annotation_nodes:
:vartype annotation_nodes: list[~flow.models.GraphAnnotationNode]
"""
_attribute_map = {
'node_layouts': {'key': 'nodeLayouts', 'type': '{NodeLayout}'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
'annotation_nodes': {'key': 'annotationNodes', 'type': '[GraphAnnotationNode]'},
}
def __init__(
self,
*,
node_layouts: Optional[Dict[str, "NodeLayout"]] = None,
extended_data: Optional[str] = None,
annotation_nodes: Optional[List["GraphAnnotationNode"]] = None,
**kwargs
):
"""
:keyword node_layouts: This is a dictionary.
:paramtype node_layouts: dict[str, ~flow.models.NodeLayout]
:keyword extended_data:
:paramtype extended_data: str
:keyword annotation_nodes:
:paramtype annotation_nodes: list[~flow.models.GraphAnnotationNode]
"""
super(GraphLayoutCreationInfo, self).__init__(**kwargs)
self.node_layouts = node_layouts
self.extended_data = extended_data
self.annotation_nodes = annotation_nodes


class GraphModuleNode(msrest.serialization.Model):
"""GraphModuleNode.
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.ModuleType
:ivar runconfig:
:vartype runconfig: str
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.ParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.OutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.InputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.ControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.ExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'module_type': {'key': 'moduleType', 'type': 'str'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
module_type: Optional[Union[str, "ModuleType"]] = None,
runconfig: Optional[str] = None,
id: Optional[str] = None,
module_id: Optional[str] = None,
comment: Optional[str] = None,
name: Optional[str] = None,
module_parameters: Optional[List["ParameterAssignment"]] = None,
module_metadata_parameters: Optional[List["ParameterAssignment"]] = None,
module_output_settings: Optional[List["OutputSetting"]] = None,
module_input_settings: Optional[List["InputSetting"]] = None,
use_graph_default_compute: Optional[bool] = None,
use_graph_default_datastore: Optional[bool] = None,
regenerate_output: Optional[bool] = None,
control_inputs: Optional[List["ControlInput"]] = None,
cloud_settings: Optional["CloudSettings"] = None,
execution_phase: Optional[Union[str, "ExecutionPhase"]] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.ModuleType
:keyword runconfig:
:paramtype runconfig: str
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.ParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.OutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.InputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.ControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.ExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphModuleNode, self).__init__(**kwargs)
self.module_type = module_type
self.runconfig = runconfig
self.id = id
self.module_id = module_id
self.comment = comment
self.name = name
self.module_parameters = module_parameters
self.module_metadata_parameters = module_metadata_parameters
self.module_output_settings = module_output_settings
self.module_input_settings = module_input_settings
self.use_graph_default_compute = use_graph_default_compute
self.use_graph_default_datastore = use_graph_default_datastore
self.regenerate_output = regenerate_output
self.control_inputs = control_inputs
self.cloud_settings = cloud_settings
self.execution_phase = execution_phase
self.run_attribution = run_attribution
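

# Illustrative usage (not part of the generated code): enum-typed fields accept
# either the enum member or its string value, so the documented strings can be
# passed directly. The IDs and names below are placeholders.
#
#     node = GraphModuleNode(
#         id="node-1",
#         module_id="module-123",
#         name="train_step",
#         module_type="None",             # or a ModuleType enum member
#         execution_phase="Execution",    # "Execution", "Initialization" or "Finalization"
#         use_graph_default_compute=True,
#     )
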
class GraphModuleNodeRunSetting(msrest.serialization.Model):
"""GraphModuleNodeRunSetting.
:ivar node_id:
:vartype node_id: str
:ivar module_id:
:vartype module_id: str
:ivar step_type:
:vartype step_type: str
:ivar run_settings:
:vartype run_settings: list[~flow.models.RunSettingParameterAssignment]
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'run_settings': {'key': 'runSettings', 'type': '[RunSettingParameterAssignment]'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
module_id: Optional[str] = None,
step_type: Optional[str] = None,
run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword module_id:
:paramtype module_id: str
:keyword step_type:
:paramtype step_type: str
:keyword run_settings:
:paramtype run_settings: list[~flow.models.RunSettingParameterAssignment]
"""
super(GraphModuleNodeRunSetting, self).__init__(**kwargs)
self.node_id = node_id
self.module_id = module_id
self.step_type = step_type
self.run_settings = run_settings


class GraphModuleNodeUIInputSetting(msrest.serialization.Model):
"""GraphModuleNodeUIInputSetting.
:ivar node_id:
:vartype node_id: str
:ivar module_id:
:vartype module_id: str
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.UIInputSetting]
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[UIInputSetting]'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
module_id: Optional[str] = None,
module_input_settings: Optional[List["UIInputSetting"]] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword module_id:
:paramtype module_id: str
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.UIInputSetting]
"""
super(GraphModuleNodeUIInputSetting, self).__init__(**kwargs)
self.node_id = node_id
self.module_id = module_id
self.module_input_settings = module_input_settings


class GraphNodeStatusInfo(msrest.serialization.Model):
"""GraphNodeStatusInfo.
:ivar status: Possible values include: "NotStarted", "Queued", "Running", "Failed", "Finished",
"Canceled", "PartiallyExecuted", "Bypassed".
:vartype status: str or ~flow.models.TaskStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar is_bypassed:
:vartype is_bypassed: bool
:ivar has_failed_child_run:
:vartype has_failed_child_run: bool
:ivar partially_executed:
:vartype partially_executed: bool
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar aether_creation_time:
:vartype aether_creation_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar run_history_creation_time:
:vartype run_history_creation_time: ~datetime.datetime
:ivar reuse_info:
:vartype reuse_info: ~flow.models.TaskReuseInfo
:ivar control_flow_info:
:vartype control_flow_info: ~flow.models.TaskControlFlowInfo
:ivar status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:vartype status_code: str or ~flow.models.TaskStatusCode
:ivar status_detail:
:vartype status_detail: str
:ivar creation_time:
:vartype creation_time: ~datetime.datetime
:ivar schedule_time:
:vartype schedule_time: ~datetime.datetime
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar request_id:
:vartype request_id: str
:ivar run_id:
:vartype run_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar real_time_log_path:
:vartype real_time_log_path: str
:ivar has_warnings:
:vartype has_warnings: bool
:ivar composite_node_id:
:vartype composite_node_id: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'is_bypassed': {'key': 'isBypassed', 'type': 'bool'},
'has_failed_child_run': {'key': 'hasFailedChildRun', 'type': 'bool'},
'partially_executed': {'key': 'partiallyExecuted', 'type': 'bool'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'aether_creation_time': {'key': 'aetherCreationTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'run_history_creation_time': {'key': 'runHistoryCreationTime', 'type': 'iso-8601'},
'reuse_info': {'key': 'reuseInfo', 'type': 'TaskReuseInfo'},
'control_flow_info': {'key': 'controlFlowInfo', 'type': 'TaskControlFlowInfo'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'schedule_time': {'key': 'scheduleTime', 'type': 'iso-8601'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'request_id': {'key': 'requestId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'real_time_log_path': {'key': 'realTimeLogPath', 'type': 'str'},
'has_warnings': {'key': 'hasWarnings', 'type': 'bool'},
'composite_node_id': {'key': 'compositeNodeId', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[Union[str, "TaskStatusCode"]] = None,
run_status: Optional[Union[str, "RunStatus"]] = None,
is_bypassed: Optional[bool] = None,
has_failed_child_run: Optional[bool] = None,
partially_executed: Optional[bool] = None,
properties: Optional[Dict[str, str]] = None,
aether_start_time: Optional[datetime.datetime] = None,
aether_end_time: Optional[datetime.datetime] = None,
aether_creation_time: Optional[datetime.datetime] = None,
run_history_start_time: Optional[datetime.datetime] = None,
run_history_end_time: Optional[datetime.datetime] = None,
run_history_creation_time: Optional[datetime.datetime] = None,
reuse_info: Optional["TaskReuseInfo"] = None,
control_flow_info: Optional["TaskControlFlowInfo"] = None,
status_code: Optional[Union[str, "TaskStatusCode"]] = None,
status_detail: Optional[str] = None,
creation_time: Optional[datetime.datetime] = None,
schedule_time: Optional[datetime.datetime] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
request_id: Optional[str] = None,
run_id: Optional[str] = None,
data_container_id: Optional[str] = None,
real_time_log_path: Optional[str] = None,
has_warnings: Optional[bool] = None,
composite_node_id: Optional[str] = None,
**kwargs
):
"""
:keyword status: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:paramtype status: str or ~flow.models.TaskStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword is_bypassed:
:paramtype is_bypassed: bool
:keyword has_failed_child_run:
:paramtype has_failed_child_run: bool
:keyword partially_executed:
:paramtype partially_executed: bool
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword aether_creation_time:
:paramtype aether_creation_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword run_history_creation_time:
:paramtype run_history_creation_time: ~datetime.datetime
:keyword reuse_info:
:paramtype reuse_info: ~flow.models.TaskReuseInfo
:keyword control_flow_info:
:paramtype control_flow_info: ~flow.models.TaskControlFlowInfo
:keyword status_code: Possible values include: "NotStarted", "Queued", "Running", "Failed",
"Finished", "Canceled", "PartiallyExecuted", "Bypassed".
:paramtype status_code: str or ~flow.models.TaskStatusCode
:keyword status_detail:
:paramtype status_detail: str
:keyword creation_time:
:paramtype creation_time: ~datetime.datetime
:keyword schedule_time:
:paramtype schedule_time: ~datetime.datetime
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword request_id:
:paramtype request_id: str
:keyword run_id:
:paramtype run_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword real_time_log_path:
:paramtype real_time_log_path: str
:keyword has_warnings:
:paramtype has_warnings: bool
:keyword composite_node_id:
:paramtype composite_node_id: str
"""
super(GraphNodeStatusInfo, self).__init__(**kwargs)
self.status = status
self.run_status = run_status
self.is_bypassed = is_bypassed
self.has_failed_child_run = has_failed_child_run
self.partially_executed = partially_executed
self.properties = properties
self.aether_start_time = aether_start_time
self.aether_end_time = aether_end_time
self.aether_creation_time = aether_creation_time
self.run_history_start_time = run_history_start_time
self.run_history_end_time = run_history_end_time
self.run_history_creation_time = run_history_creation_time
self.reuse_info = reuse_info
self.control_flow_info = control_flow_info
self.status_code = status_code
self.status_detail = status_detail
self.creation_time = creation_time
self.schedule_time = schedule_time
self.start_time = start_time
self.end_time = end_time
self.request_id = request_id
self.run_id = run_id
self.data_container_id = data_container_id
self.real_time_log_path = real_time_log_path
self.has_warnings = has_warnings
self.composite_node_id = composite_node_id
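

# Illustrative usage (not part of the generated code): a failed node as the
# service might report it; ``serialize()`` maps attribute names to the camelCase
# keys in ``_attribute_map`` (e.g. ``hasFailedChildRun``) and omits unset fields.
#
#     info = GraphNodeStatusInfo(
#         status="Failed",
#         run_status="Failed",
#         has_failed_child_run=True,
#         status_detail="Step exited with a non-zero exit code.",
#     )
#     wire = info.serialize()   # {'status': 'Failed', ..., 'hasFailedChildRun': True}
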
class GraphReferenceNode(msrest.serialization.Model):
"""GraphReferenceNode.
:ivar graph_id:
:vartype graph_id: str
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar id:
:vartype id: str
:ivar module_id:
:vartype module_id: str
:ivar comment:
:vartype comment: str
:ivar name:
:vartype name: str
:ivar module_parameters:
:vartype module_parameters: list[~flow.models.ParameterAssignment]
:ivar module_metadata_parameters:
:vartype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:ivar module_output_settings:
:vartype module_output_settings: list[~flow.models.OutputSetting]
:ivar module_input_settings:
:vartype module_input_settings: list[~flow.models.InputSetting]
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar use_graph_default_datastore:
:vartype use_graph_default_datastore: bool
:ivar regenerate_output:
:vartype regenerate_output: bool
:ivar control_inputs:
:vartype control_inputs: list[~flow.models.ControlInput]
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar execution_phase: Possible values include: "Execution", "Initialization", "Finalization".
:vartype execution_phase: str or ~flow.models.ExecutionPhase
:ivar run_attribution:
:vartype run_attribution: str
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'module_id': {'key': 'moduleId', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'module_parameters': {'key': 'moduleParameters', 'type': '[ParameterAssignment]'},
'module_metadata_parameters': {'key': 'moduleMetadataParameters', 'type': '[ParameterAssignment]'},
'module_output_settings': {'key': 'moduleOutputSettings', 'type': '[OutputSetting]'},
'module_input_settings': {'key': 'moduleInputSettings', 'type': '[InputSetting]'},
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'use_graph_default_datastore': {'key': 'useGraphDefaultDatastore', 'type': 'bool'},
'regenerate_output': {'key': 'regenerateOutput', 'type': 'bool'},
'control_inputs': {'key': 'controlInputs', 'type': '[ControlInput]'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'execution_phase': {'key': 'executionPhase', 'type': 'str'},
'run_attribution': {'key': 'runAttribution', 'type': 'str'},
}
def __init__(
self,
*,
graph_id: Optional[str] = None,
default_compute: Optional["ComputeSetting"] = None,
default_datastore: Optional["DatastoreSetting"] = None,
id: Optional[str] = None,
module_id: Optional[str] = None,
comment: Optional[str] = None,
name: Optional[str] = None,
module_parameters: Optional[List["ParameterAssignment"]] = None,
module_metadata_parameters: Optional[List["ParameterAssignment"]] = None,
module_output_settings: Optional[List["OutputSetting"]] = None,
module_input_settings: Optional[List["InputSetting"]] = None,
use_graph_default_compute: Optional[bool] = None,
use_graph_default_datastore: Optional[bool] = None,
regenerate_output: Optional[bool] = None,
control_inputs: Optional[List["ControlInput"]] = None,
cloud_settings: Optional["CloudSettings"] = None,
execution_phase: Optional[Union[str, "ExecutionPhase"]] = None,
run_attribution: Optional[str] = None,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword id:
:paramtype id: str
:keyword module_id:
:paramtype module_id: str
:keyword comment:
:paramtype comment: str
:keyword name:
:paramtype name: str
:keyword module_parameters:
:paramtype module_parameters: list[~flow.models.ParameterAssignment]
:keyword module_metadata_parameters:
:paramtype module_metadata_parameters: list[~flow.models.ParameterAssignment]
:keyword module_output_settings:
:paramtype module_output_settings: list[~flow.models.OutputSetting]
:keyword module_input_settings:
:paramtype module_input_settings: list[~flow.models.InputSetting]
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword use_graph_default_datastore:
:paramtype use_graph_default_datastore: bool
:keyword regenerate_output:
:paramtype regenerate_output: bool
:keyword control_inputs:
:paramtype control_inputs: list[~flow.models.ControlInput]
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword execution_phase: Possible values include: "Execution", "Initialization",
"Finalization".
:paramtype execution_phase: str or ~flow.models.ExecutionPhase
:keyword run_attribution:
:paramtype run_attribution: str
"""
super(GraphReferenceNode, self).__init__(**kwargs)
self.graph_id = graph_id
self.default_compute = default_compute
self.default_datastore = default_datastore
self.id = id
self.module_id = module_id
self.comment = comment
self.name = name
self.module_parameters = module_parameters
self.module_metadata_parameters = module_metadata_parameters
self.module_output_settings = module_output_settings
self.module_input_settings = module_input_settings
self.use_graph_default_compute = use_graph_default_compute
self.use_graph_default_datastore = use_graph_default_datastore
self.regenerate_output = regenerate_output
self.control_inputs = control_inputs
self.cloud_settings = cloud_settings
self.execution_phase = execution_phase
self.run_attribution = run_attribution


class HdfsReference(msrest.serialization.Model):
"""HdfsReference.
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
aml_data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
"""
super(HdfsReference, self).__init__(**kwargs)
self.aml_data_store_name = aml_data_store_name
self.relative_path = relative_path


class HdiClusterComputeInfo(msrest.serialization.Model):
"""HdiClusterComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
*,
address: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
private_key: Optional[str] = None,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(HdiClusterComputeInfo, self).__init__(**kwargs)
self.address = address
self.username = username
self.password = password
self.private_key = private_key


class HdiConfiguration(msrest.serialization.Model):
"""HdiConfiguration.
:ivar yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
:vartype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
"""
_attribute_map = {
'yarn_deploy_mode': {'key': 'yarnDeployMode', 'type': 'str'},
}
def __init__(
self,
*,
yarn_deploy_mode: Optional[Union[str, "YarnDeployMode"]] = None,
**kwargs
):
"""
:keyword yarn_deploy_mode: Possible values include: "None", "Client", "Cluster".
:paramtype yarn_deploy_mode: str or ~flow.models.YarnDeployMode
"""
super(HdiConfiguration, self).__init__(**kwargs)
self.yarn_deploy_mode = yarn_deploy_mode


class HdiRunConfiguration(msrest.serialization.Model):
"""HdiRunConfiguration.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar compute_name:
:vartype compute_name: str
:ivar queue:
:vartype queue: str
:ivar driver_memory:
:vartype driver_memory: str
:ivar driver_cores:
:vartype driver_cores: int
:ivar executor_memory:
:vartype executor_memory: str
:ivar executor_cores:
:vartype executor_cores: int
:ivar number_executors:
:vartype number_executors: int
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar name:
:vartype name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'queue': {'key': 'queue', 'type': 'str'},
'driver_memory': {'key': 'driverMemory', 'type': 'str'},
'driver_cores': {'key': 'driverCores', 'type': 'int'},
'executor_memory': {'key': 'executorMemory', 'type': 'str'},
'executor_cores': {'key': 'executorCores', 'type': 'int'},
'number_executors': {'key': 'numberExecutors', 'type': 'int'},
'conf': {'key': 'conf', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
file: Optional[str] = None,
class_name: Optional[str] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
jars: Optional[List[str]] = None,
py_files: Optional[List[str]] = None,
compute_name: Optional[str] = None,
queue: Optional[str] = None,
driver_memory: Optional[str] = None,
driver_cores: Optional[int] = None,
executor_memory: Optional[str] = None,
executor_cores: Optional[int] = None,
number_executors: Optional[int] = None,
conf: Optional[Dict[str, str]] = None,
name: Optional[str] = None,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword compute_name:
:paramtype compute_name: str
:keyword queue:
:paramtype queue: str
:keyword driver_memory:
:paramtype driver_memory: str
:keyword driver_cores:
:paramtype driver_cores: int
:keyword executor_memory:
:paramtype executor_memory: str
:keyword executor_cores:
:paramtype executor_cores: int
:keyword number_executors:
:paramtype number_executors: int
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword name:
:paramtype name: str
"""
super(HdiRunConfiguration, self).__init__(**kwargs)
self.file = file
self.class_name = class_name
self.files = files
self.archives = archives
self.jars = jars
self.py_files = py_files
self.compute_name = compute_name
self.queue = queue
self.driver_memory = driver_memory
self.driver_cores = driver_cores
self.executor_memory = executor_memory
self.executor_cores = executor_cores
self.number_executors = number_executors
self.conf = conf
self.name = name
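

# Illustrative usage (not part of the generated code): a Spark-on-HDInsight style
# submission. All file names, sizes, counts and conf entries below are placeholders.
#
#     hdi_run = HdiRunConfiguration(
#         file="train.py",
#         py_files=["helpers.zip"],
#         compute_name="my-hdi-cluster",
#         queue="default",
#         driver_memory="4g",
#         driver_cores=2,
#         executor_memory="8g",
#         executor_cores=4,
#         number_executors=4,
#         conf={"spark.speculation": "true"},
#     )
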
class HistoryConfiguration(msrest.serialization.Model):
"""HistoryConfiguration.
:ivar output_collection:
:vartype output_collection: bool
:ivar directories_to_watch:
:vartype directories_to_watch: list[str]
:ivar enable_m_lflow_tracking:
:vartype enable_m_lflow_tracking: bool
"""
_attribute_map = {
'output_collection': {'key': 'outputCollection', 'type': 'bool'},
'directories_to_watch': {'key': 'directoriesToWatch', 'type': '[str]'},
'enable_m_lflow_tracking': {'key': 'enableMLflowTracking', 'type': 'bool'},
}
def __init__(
self,
*,
output_collection: Optional[bool] = True,
        directories_to_watch: Optional[List[str]] = None,
enable_m_lflow_tracking: Optional[bool] = True,
**kwargs
):
"""
:keyword output_collection:
:paramtype output_collection: bool
:keyword directories_to_watch:
:paramtype directories_to_watch: list[str]
:keyword enable_m_lflow_tracking:
:paramtype enable_m_lflow_tracking: bool
"""
super(HistoryConfiguration, self).__init__(**kwargs)
self.output_collection = output_collection
        # Default applied here rather than in the signature, so instances do not
        # share one mutable list; None still yields the original ['logs'] default.
        self.directories_to_watch = directories_to_watch if directories_to_watch is not None else ['logs']
self.enable_m_lflow_tracking = enable_m_lflow_tracking
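

# Illustrative usage (not part of the generated code): every field has a
# default, so a bare construction is meaningful.
#
#     history = HistoryConfiguration()
#     assert history.output_collection is True
#     assert history.directories_to_watch == ['logs']
#
#     # Watch an extra directory while keeping the other defaults:
#     history = HistoryConfiguration(directories_to_watch=['logs', 'outputs'])
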
class HyperDriveConfiguration(msrest.serialization.Model):
"""HyperDriveConfiguration.
:ivar hyper_drive_run_config:
:vartype hyper_drive_run_config: str
:ivar primary_metric_goal:
:vartype primary_metric_goal: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar arguments:
:vartype arguments: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'hyper_drive_run_config': {'key': 'hyperDriveRunConfig', 'type': 'str'},
'primary_metric_goal': {'key': 'primaryMetricGoal', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
*,
hyper_drive_run_config: Optional[str] = None,
primary_metric_goal: Optional[str] = None,
primary_metric_name: Optional[str] = None,
arguments: Optional[List["ArgumentAssignment"]] = None,
**kwargs
):
"""
:keyword hyper_drive_run_config:
:paramtype hyper_drive_run_config: str
:keyword primary_metric_goal:
:paramtype primary_metric_goal: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword arguments:
:paramtype arguments: list[~flow.models.ArgumentAssignment]
"""
super(HyperDriveConfiguration, self).__init__(**kwargs)
self.hyper_drive_run_config = hyper_drive_run_config
self.primary_metric_goal = primary_metric_goal
self.primary_metric_name = primary_metric_name
self.arguments = arguments


class ICheckableLongRunningOperationResponse(msrest.serialization.Model):
"""ICheckableLongRunningOperationResponse.
:ivar completion_result: Any object.
:vartype completion_result: any
:ivar location:
:vartype location: str
:ivar operation_result:
:vartype operation_result: str
"""
_attribute_map = {
'completion_result': {'key': 'completionResult', 'type': 'object'},
'location': {'key': 'location', 'type': 'str'},
'operation_result': {'key': 'operationResult', 'type': 'str'},
}
def __init__(
self,
*,
completion_result: Optional[Any] = None,
location: Optional[str] = None,
operation_result: Optional[str] = None,
**kwargs
):
"""
:keyword completion_result: Any object.
:paramtype completion_result: any
:keyword location:
:paramtype location: str
:keyword operation_result:
:paramtype operation_result: str
"""
super(ICheckableLongRunningOperationResponse, self).__init__(**kwargs)
self.completion_result = completion_result
self.location = location
self.operation_result = operation_result


class IdentityConfiguration(msrest.serialization.Model):
"""IdentityConfiguration.
:ivar type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
:vartype type: str or ~flow.models.IdentityType
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar secret:
:vartype secret: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'secret': {'key': 'secret', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "IdentityType"]] = None,
properties: Optional[Dict[str, str]] = None,
secret: Optional[str] = None,
**kwargs
):
"""
:keyword type: Possible values include: "Managed", "ServicePrincipal", "AMLToken".
:paramtype type: str or ~flow.models.IdentityType
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword secret:
:paramtype secret: str
"""
super(IdentityConfiguration, self).__init__(**kwargs)
self.type = type
self.properties = properties
self.secret = secret
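

# Illustrative usage (not part of the generated code): ``type`` takes one of the
# documented strings ("Managed", "ServicePrincipal", "AMLToken") or the matching
# IdentityType member. The properties key and GUID below are placeholders.
#
#     identity = IdentityConfiguration(
#         type="Managed",
#         properties={"clientId": "00000000-0000-0000-0000-000000000000"},
#     )
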
class IdentitySetting(msrest.serialization.Model):
"""IdentitySetting.
:ivar type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:vartype type: str or ~flow.models.AEVAIdentityType
:ivar client_id:
:vartype client_id: str
:ivar object_id:
:vartype object_id: str
:ivar msi_resource_id:
:vartype msi_resource_id: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
'object_id': {'key': 'objectId', 'type': 'str'},
'msi_resource_id': {'key': 'msiResourceId', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "AEVAIdentityType"]] = None,
client_id: Optional[str] = None,
object_id: Optional[str] = None,
msi_resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword type: Possible values include: "UserIdentity", "Managed", "AMLToken".
:paramtype type: str or ~flow.models.AEVAIdentityType
:keyword client_id:
:paramtype client_id: str
:keyword object_id:
:paramtype object_id: str
:keyword msi_resource_id:
:paramtype msi_resource_id: str
"""
super(IdentitySetting, self).__init__(**kwargs)
self.type = type
self.client_id = client_id
self.object_id = object_id
self.msi_resource_id = msi_resource_id


class ImportDataTask(msrest.serialization.Model):
"""ImportDataTask.
:ivar data_transfer_source:
:vartype data_transfer_source: ~flow.models.DataTransferSource
"""
_attribute_map = {
'data_transfer_source': {'key': 'DataTransferSource', 'type': 'DataTransferSource'},
}
def __init__(
self,
*,
data_transfer_source: Optional["DataTransferSource"] = None,
**kwargs
):
"""
:keyword data_transfer_source:
:paramtype data_transfer_source: ~flow.models.DataTransferSource
"""
super(ImportDataTask, self).__init__(**kwargs)
self.data_transfer_source = data_transfer_source


class IndexedErrorResponse(msrest.serialization.Model):
"""IndexedErrorResponse.
:ivar code:
:vartype code: str
:ivar error_code_hierarchy:
:vartype error_code_hierarchy: str
:ivar message:
:vartype message: str
:ivar time:
:vartype time: ~datetime.datetime
:ivar component_name:
:vartype component_name: str
:ivar severity:
:vartype severity: int
:ivar details_uri:
:vartype details_uri: str
:ivar reference_code:
:vartype reference_code: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'error_code_hierarchy': {'key': 'errorCodeHierarchy', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'time': {'key': 'time', 'type': 'iso-8601'},
'component_name': {'key': 'componentName', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'int'},
'details_uri': {'key': 'detailsUri', 'type': 'str'},
'reference_code': {'key': 'referenceCode', 'type': 'str'},
}
def __init__(
self,
*,
code: Optional[str] = None,
error_code_hierarchy: Optional[str] = None,
message: Optional[str] = None,
time: Optional[datetime.datetime] = None,
component_name: Optional[str] = None,
severity: Optional[int] = None,
details_uri: Optional[str] = None,
reference_code: Optional[str] = None,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword error_code_hierarchy:
:paramtype error_code_hierarchy: str
:keyword message:
:paramtype message: str
:keyword time:
:paramtype time: ~datetime.datetime
:keyword component_name:
:paramtype component_name: str
:keyword severity:
:paramtype severity: int
:keyword details_uri:
:paramtype details_uri: str
:keyword reference_code:
:paramtype reference_code: str
"""
super(IndexedErrorResponse, self).__init__(**kwargs)
self.code = code
self.error_code_hierarchy = error_code_hierarchy
self.message = message
self.time = time
self.component_name = component_name
self.severity = severity
self.details_uri = details_uri
self.reference_code = reference_code


class InitScriptInfoDto(msrest.serialization.Model):
"""InitScriptInfoDto.
:ivar dbfs:
:vartype dbfs: ~flow.models.DbfsStorageInfoDto
"""
_attribute_map = {
'dbfs': {'key': 'dbfs', 'type': 'DbfsStorageInfoDto'},
}
def __init__(
self,
*,
dbfs: Optional["DbfsStorageInfoDto"] = None,
**kwargs
):
"""
:keyword dbfs:
:paramtype dbfs: ~flow.models.DbfsStorageInfoDto
"""
super(InitScriptInfoDto, self).__init__(**kwargs)
self.dbfs = dbfs


class InnerErrorDetails(msrest.serialization.Model):
"""InnerErrorDetails.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
target: Optional[str] = None,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
"""
super(InnerErrorDetails, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target


class InnerErrorResponse(msrest.serialization.Model):
"""A nested structure of errors.
:ivar code: The error code.
:vartype code: str
:ivar inner_error: A nested structure of errors.
:vartype inner_error: ~flow.models.InnerErrorResponse
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
}
def __init__(
self,
*,
code: Optional[str] = None,
inner_error: Optional["InnerErrorResponse"] = None,
**kwargs
):
"""
:keyword code: The error code.
:paramtype code: str
:keyword inner_error: A nested structure of errors.
:paramtype inner_error: ~flow.models.InnerErrorResponse
"""
super(InnerErrorResponse, self).__init__(**kwargs)
self.code = code
self.inner_error = inner_error
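

# Illustrative usage (not part of the generated code): the structure is
# recursive, so the most specific error code sits at the deepest level.
# Flattening the chain yields a hierarchy such as ['UserError', 'QuotaExceeded']
# (cf. the flattened ``error_code_hierarchy`` string on IndexedErrorResponse).
#
#     root = InnerErrorResponse(
#         code="UserError",
#         inner_error=InnerErrorResponse(code="QuotaExceeded"),
#     )
#     codes, err = [], root
#     while err is not None:
#         codes.append(err.code)
#         err = err.inner_error
#     # codes == ['UserError', 'QuotaExceeded']
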
class InputAsset(msrest.serialization.Model):
"""InputAsset.
:ivar asset:
:vartype asset: ~flow.models.Asset
:ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:vartype mechanism: str or ~flow.models.DeliveryMechanism
:ivar environment_variable_name:
:vartype environment_variable_name: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar options: Dictionary of :code:`<string>`.
:vartype options: dict[str, str]
"""
_attribute_map = {
'asset': {'key': 'asset', 'type': 'Asset'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'options': {'key': 'options', 'type': '{str}'},
}
def __init__(
self,
*,
asset: Optional["Asset"] = None,
mechanism: Optional[Union[str, "DeliveryMechanism"]] = None,
environment_variable_name: Optional[str] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
options: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword asset:
:paramtype asset: ~flow.models.Asset
:keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
:paramtype mechanism: str or ~flow.models.DeliveryMechanism
:keyword environment_variable_name:
:paramtype environment_variable_name: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword options: Dictionary of :code:`<string>`.
:paramtype options: dict[str, str]
"""
super(InputAsset, self).__init__(**kwargs)
self.asset = asset
self.mechanism = mechanism
self.environment_variable_name = environment_variable_name
self.path_on_compute = path_on_compute
self.overwrite = overwrite
self.options = options


class InputData(msrest.serialization.Model):
"""InputData.
:ivar dataset_id:
:vartype dataset_id: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar value:
:vartype value: str
"""
_attribute_map = {
'dataset_id': {'key': 'datasetId', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
dataset_id: Optional[str] = None,
mode: Optional[Union[str, "DataBindingMode"]] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword dataset_id:
:paramtype dataset_id: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword value:
:paramtype value: str
"""
super(InputData, self).__init__(**kwargs)
self.dataset_id = dataset_id
self.mode = mode
self.value = value


class InputDataBinding(msrest.serialization.Model):
"""InputDataBinding.
:ivar data_id:
:vartype data_id: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar description:
:vartype description: str
:ivar uri:
:vartype uri: ~flow.models.MfeInternalUriReference
:ivar value:
:vartype value: str
:ivar asset_uri:
:vartype asset_uri: str
:ivar job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_input_type: str or ~flow.models.JobInputType
"""
_attribute_map = {
'data_id': {'key': 'dataId', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
'value': {'key': 'value', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'job_input_type': {'key': 'jobInputType', 'type': 'str'},
}
def __init__(
self,
*,
data_id: Optional[str] = None,
path_on_compute: Optional[str] = None,
mode: Optional[Union[str, "DataBindingMode"]] = None,
description: Optional[str] = None,
uri: Optional["MfeInternalUriReference"] = None,
value: Optional[str] = None,
asset_uri: Optional[str] = None,
job_input_type: Optional[Union[str, "JobInputType"]] = None,
**kwargs
):
"""
:keyword data_id:
:paramtype data_id: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword description:
:paramtype description: str
:keyword uri:
:paramtype uri: ~flow.models.MfeInternalUriReference
:keyword value:
:paramtype value: str
:keyword asset_uri:
:paramtype asset_uri: str
:keyword job_input_type: Possible values include: "Dataset", "Uri", "Literal", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType
"""
super(InputDataBinding, self).__init__(**kwargs)
self.data_id = data_id
self.path_on_compute = path_on_compute
self.mode = mode
self.description = description
self.uri = uri
self.value = value
self.asset_uri = asset_uri
self.job_input_type = job_input_type
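

# Illustrative usage (not part of the generated code): the data ID below is a
# placeholder asset reference; ``mode`` and ``job_input_type`` take the
# documented strings or their enum members.
#
#     binding = InputDataBinding(
#         data_id="azureml:my-dataset:1",   # placeholder
#         mode="ReadOnlyMount",
#         job_input_type="UriFolder",
#         description="Training data",
#     )
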
class InputDefinition(msrest.serialization.Model):
"""InputDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar default: Anything.
:vartype default: any
:ivar description:
:vartype description: str
:ivar enum:
:vartype enum: list[str]
:ivar enabled_by:
:vartype enabled_by: str
:ivar enabled_by_type:
:vartype enabled_by_type: list[str or ~flow.models.ValueType]
:ivar enabled_by_value:
:vartype enabled_by_value: list[any]
:ivar model_list:
:vartype model_list: list[str]
:ivar capabilities:
:vartype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:ivar dynamic_list:
:vartype dynamic_list: ~flow.models.ToolInputDynamicList
:ivar allow_manual_entry:
:vartype allow_manual_entry: bool
:ivar is_multi_select:
:vartype is_multi_select: bool
:ivar generated_by:
:vartype generated_by: ~flow.models.ToolInputGeneratedBy
:ivar input_type: Possible values include: "default", "uionly_hidden".
:vartype input_type: str or ~flow.models.InputType
:ivar advanced:
:vartype advanced: bool
:ivar ui_hints: This is a dictionary.
:vartype ui_hints: dict[str, any]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': '[str]'},
'default': {'key': 'default', 'type': 'object'},
'description': {'key': 'description', 'type': 'str'},
'enum': {'key': 'enum', 'type': '[str]'},
'enabled_by': {'key': 'enabled_by', 'type': 'str'},
'enabled_by_type': {'key': 'enabled_by_type', 'type': '[str]'},
'enabled_by_value': {'key': 'enabled_by_value', 'type': '[object]'},
'model_list': {'key': 'model_list', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': 'AzureOpenAIModelCapabilities'},
'dynamic_list': {'key': 'dynamic_list', 'type': 'ToolInputDynamicList'},
'allow_manual_entry': {'key': 'allow_manual_entry', 'type': 'bool'},
'is_multi_select': {'key': 'is_multi_select', 'type': 'bool'},
'generated_by': {'key': 'generated_by', 'type': 'ToolInputGeneratedBy'},
'input_type': {'key': 'input_type', 'type': 'str'},
'advanced': {'key': 'advanced', 'type': 'bool'},
'ui_hints': {'key': 'ui_hints', 'type': '{object}'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[List[Union[str, "ValueType"]]] = None,
default: Optional[Any] = None,
description: Optional[str] = None,
enum: Optional[List[str]] = None,
enabled_by: Optional[str] = None,
enabled_by_type: Optional[List[Union[str, "ValueType"]]] = None,
enabled_by_value: Optional[List[Any]] = None,
model_list: Optional[List[str]] = None,
capabilities: Optional["AzureOpenAIModelCapabilities"] = None,
dynamic_list: Optional["ToolInputDynamicList"] = None,
allow_manual_entry: Optional[bool] = None,
is_multi_select: Optional[bool] = None,
generated_by: Optional["ToolInputGeneratedBy"] = None,
input_type: Optional[Union[str, "InputType"]] = None,
advanced: Optional[bool] = None,
ui_hints: Optional[Dict[str, Any]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword default: Anything.
:paramtype default: any
:keyword description:
:paramtype description: str
:keyword enum:
:paramtype enum: list[str]
:keyword enabled_by:
:paramtype enabled_by: str
:keyword enabled_by_type:
:paramtype enabled_by_type: list[str or ~flow.models.ValueType]
:keyword enabled_by_value:
:paramtype enabled_by_value: list[any]
:keyword model_list:
:paramtype model_list: list[str]
:keyword capabilities:
:paramtype capabilities: ~flow.models.AzureOpenAIModelCapabilities
:keyword dynamic_list:
:paramtype dynamic_list: ~flow.models.ToolInputDynamicList
:keyword allow_manual_entry:
:paramtype allow_manual_entry: bool
:keyword is_multi_select:
:paramtype is_multi_select: bool
:keyword generated_by:
:paramtype generated_by: ~flow.models.ToolInputGeneratedBy
:keyword input_type: Possible values include: "default", "uionly_hidden".
:paramtype input_type: str or ~flow.models.InputType
:keyword advanced:
:paramtype advanced: bool
:keyword ui_hints: This is a dictionary.
:paramtype ui_hints: dict[str, any]
"""
super(InputDefinition, self).__init__(**kwargs)
self.name = name
self.type = type
self.default = default
self.description = description
self.enum = enum
self.enabled_by = enabled_by
self.enabled_by_type = enabled_by_type
self.enabled_by_value = enabled_by_value
self.model_list = model_list
self.capabilities = capabilities
self.dynamic_list = dynamic_list
self.allow_manual_entry = allow_manual_entry
self.is_multi_select = is_multi_select
self.generated_by = generated_by
self.input_type = input_type
self.advanced = advanced
self.ui_hints = ui_hints
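

# Illustrative usage (not part of the generated code): a tool input that is only
# enabled when another input is set. The "string" ValueType member is assumed
# here; consult the ValueType enum in this module for the actual value list.
#
#     connection_input = InputDefinition(name="connection", type=["string"])
#     deployment_input = InputDefinition(
#         name="deployment_name",
#         type=["string"],
#         enabled_by="connection",
#         allow_manual_entry=False,
#         input_type="default",     # or "uionly_hidden"
#     )
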
class InputOutputPortMetadata(msrest.serialization.Model):
"""InputOutputPortMetadata.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar graph_module_node_id:
:vartype graph_module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar schema:
:vartype schema: str
:ivar name:
:vartype name: str
:ivar id:
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'graph_module_node_id': {'key': 'graphModuleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'schema': {'key': 'schema', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
graph_module_node_id: Optional[str] = None,
port_name: Optional[str] = None,
schema: Optional[str] = None,
name: Optional[str] = None,
**kwargs
):
"""
:keyword graph_module_node_id:
:paramtype graph_module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword schema:
:paramtype schema: str
:keyword name:
:paramtype name: str
"""
super(InputOutputPortMetadata, self).__init__(**kwargs)
self.graph_module_node_id = graph_module_node_id
self.port_name = port_name
self.schema = schema
self.name = name
self.id = None
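

# Illustrative usage (not part of the generated code): ``id`` is read-only and
# populated by the server, so it is always None on a locally built instance and
# is excluded by ``serialize()`` (msrest skips read-only attributes in request
# payloads by default).
#
#     meta = InputOutputPortMetadata(graph_module_node_id="node-1", port_name="output")
#     assert meta.id is None
#     assert "id" not in meta.serialize()
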
class InputSetting(msrest.serialization.Model):
"""InputSetting.
:ivar name:
:vartype name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar options: This is a dictionary.
:vartype options: dict[str, str]
:ivar additional_transformations:
:vartype additional_transformations: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'additional_transformations': {'key': 'additionalTransformations', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
path_on_compute: Optional[str] = None,
options: Optional[Dict[str, str]] = None,
additional_transformations: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword options: This is a dictionary.
:paramtype options: dict[str, str]
:keyword additional_transformations:
:paramtype additional_transformations: str
"""
super(InputSetting, self).__init__(**kwargs)
self.name = name
self.data_store_mode = data_store_mode
self.path_on_compute = path_on_compute
self.options = options
self.additional_transformations = additional_transformations
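

# Illustrative usage (not part of the generated code): the path and option key
# below are placeholders; ``data_store_mode`` takes one of the documented strings.
#
#     input_setting = InputSetting(
#         name="training_data",
#         data_store_mode="Mount",
#         path_on_compute="/mnt/data",          # placeholder
#         options={"IsEvalMode": "False"},      # placeholder option key
#     )
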
class IntellectualPropertyPublisherInformation(msrest.serialization.Model):
"""IntellectualPropertyPublisherInformation.
:ivar intellectual_property_publisher:
:vartype intellectual_property_publisher: str
"""
_attribute_map = {
'intellectual_property_publisher': {'key': 'intellectualPropertyPublisher', 'type': 'str'},
}
def __init__(
self,
*,
intellectual_property_publisher: Optional[str] = None,
**kwargs
):
"""
:keyword intellectual_property_publisher:
:paramtype intellectual_property_publisher: str
"""
super(IntellectualPropertyPublisherInformation, self).__init__(**kwargs)
self.intellectual_property_publisher = intellectual_property_publisher


class InteractiveConfig(msrest.serialization.Model):
"""InteractiveConfig.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
*,
is_ssh_enabled: Optional[bool] = None,
ssh_public_key: Optional[str] = None,
is_i_python_enabled: Optional[bool] = None,
is_tensor_board_enabled: Optional[bool] = None,
interactive_port: Optional[int] = None,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(InteractiveConfig, self).__init__(**kwargs)
self.is_ssh_enabled = is_ssh_enabled
self.ssh_public_key = ssh_public_key
self.is_i_python_enabled = is_i_python_enabled
self.is_tensor_board_enabled = is_tensor_board_enabled
self.interactive_port = interactive_port


class InteractiveConfiguration(msrest.serialization.Model):
"""InteractiveConfiguration.
:ivar is_ssh_enabled:
:vartype is_ssh_enabled: bool
:ivar ssh_public_key:
:vartype ssh_public_key: str
:ivar is_i_python_enabled:
:vartype is_i_python_enabled: bool
:ivar is_tensor_board_enabled:
:vartype is_tensor_board_enabled: bool
:ivar interactive_port:
:vartype interactive_port: int
"""
_attribute_map = {
'is_ssh_enabled': {'key': 'isSSHEnabled', 'type': 'bool'},
'ssh_public_key': {'key': 'sshPublicKey', 'type': 'str'},
'is_i_python_enabled': {'key': 'isIPythonEnabled', 'type': 'bool'},
'is_tensor_board_enabled': {'key': 'isTensorBoardEnabled', 'type': 'bool'},
'interactive_port': {'key': 'interactivePort', 'type': 'int'},
}
def __init__(
self,
*,
is_ssh_enabled: Optional[bool] = None,
ssh_public_key: Optional[str] = None,
is_i_python_enabled: Optional[bool] = None,
is_tensor_board_enabled: Optional[bool] = None,
interactive_port: Optional[int] = None,
**kwargs
):
"""
:keyword is_ssh_enabled:
:paramtype is_ssh_enabled: bool
:keyword ssh_public_key:
:paramtype ssh_public_key: str
:keyword is_i_python_enabled:
:paramtype is_i_python_enabled: bool
:keyword is_tensor_board_enabled:
:paramtype is_tensor_board_enabled: bool
:keyword interactive_port:
:paramtype interactive_port: int
"""
super(InteractiveConfiguration, self).__init__(**kwargs)
self.is_ssh_enabled = is_ssh_enabled
self.ssh_public_key = ssh_public_key
self.is_i_python_enabled = is_i_python_enabled
self.is_tensor_board_enabled = is_tensor_board_enabled
self.interactive_port = interactive_port


class JobCost(msrest.serialization.Model):
"""JobCost.
:ivar charged_cpu_core_seconds:
:vartype charged_cpu_core_seconds: float
:ivar charged_cpu_memory_megabyte_seconds:
:vartype charged_cpu_memory_megabyte_seconds: float
:ivar charged_gpu_seconds:
:vartype charged_gpu_seconds: float
:ivar charged_node_utilization_seconds:
:vartype charged_node_utilization_seconds: float
"""
_attribute_map = {
'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'},
'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'},
'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'},
'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'},
}
def __init__(
self,
*,
charged_cpu_core_seconds: Optional[float] = None,
charged_cpu_memory_megabyte_seconds: Optional[float] = None,
charged_gpu_seconds: Optional[float] = None,
charged_node_utilization_seconds: Optional[float] = None,
**kwargs
):
"""
:keyword charged_cpu_core_seconds:
:paramtype charged_cpu_core_seconds: float
:keyword charged_cpu_memory_megabyte_seconds:
:paramtype charged_cpu_memory_megabyte_seconds: float
:keyword charged_gpu_seconds:
:paramtype charged_gpu_seconds: float
:keyword charged_node_utilization_seconds:
:paramtype charged_node_utilization_seconds: float
"""
super(JobCost, self).__init__(**kwargs)
self.charged_cpu_core_seconds = charged_cpu_core_seconds
self.charged_cpu_memory_megabyte_seconds = charged_cpu_memory_megabyte_seconds
self.charged_gpu_seconds = charged_gpu_seconds
self.charged_node_utilization_seconds = charged_node_utilization_seconds


class JobEndpoint(msrest.serialization.Model):
"""JobEndpoint.
:ivar type:
:vartype type: str
:ivar port:
:vartype port: int
:ivar endpoint:
:vartype endpoint: str
:ivar status:
:vartype status: str
:ivar error_message:
:vartype error_message: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar nodes:
:vartype nodes: ~flow.models.MfeInternalNodes
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'nodes': {'key': 'nodes', 'type': 'MfeInternalNodes'},
}
def __init__(
self,
*,
type: Optional[str] = None,
port: Optional[int] = None,
endpoint: Optional[str] = None,
status: Optional[str] = None,
error_message: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
nodes: Optional["MfeInternalNodes"] = None,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword port:
:paramtype port: int
:keyword endpoint:
:paramtype endpoint: str
:keyword status:
:paramtype status: str
:keyword error_message:
:paramtype error_message: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword nodes:
:paramtype nodes: ~flow.models.MfeInternalNodes
"""
super(JobEndpoint, self).__init__(**kwargs)
self.type = type
self.port = port
self.endpoint = endpoint
self.status = status
self.error_message = error_message
self.properties = properties
self.nodes = nodes
class JobInput(msrest.serialization.Model):
"""JobInput.
All required parameters must be populated in order to send to Azure.
:ivar job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
"UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_input_type: str or ~flow.models.JobInputType
:ivar description:
:vartype description: str
"""
_validation = {
'job_input_type': {'required': True},
}
_attribute_map = {
'job_input_type': {'key': 'jobInputType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
job_input_type: Union[str, "JobInputType"],
description: Optional[str] = None,
**kwargs
):
"""
:keyword job_input_type: Required. Possible values include: "Dataset", "Uri", "Literal",
"UriFile", "UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_input_type: str or ~flow.models.JobInputType
:keyword description:
:paramtype description: str
"""
super(JobInput, self).__init__(**kwargs)
self.job_input_type = job_input_type
self.description = description
class JobOutput(msrest.serialization.Model):
"""JobOutput.
All required parameters must be populated in order to send to Azure.
:ivar job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_output_type: str or ~flow.models.JobOutputType
:ivar description:
:vartype description: str
:ivar auto_delete_setting:
:vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
_validation = {
'job_output_type': {'required': True},
}
_attribute_map = {
'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
}
def __init__(
self,
*,
job_output_type: Union[str, "JobOutputType"],
description: Optional[str] = None,
auto_delete_setting: Optional["AutoDeleteSetting"] = None,
**kwargs
):
"""
:keyword job_output_type: Required. Possible values include: "Uri", "Dataset", "UriFile",
"UriFolder", "MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_output_type: str or ~flow.models.JobOutputType
:keyword description:
:paramtype description: str
:keyword auto_delete_setting:
:paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
super(JobOutput, self).__init__(**kwargs)
self.job_output_type = job_output_type
self.description = description
self.auto_delete_setting = auto_delete_setting
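
# Illustrative serialization sketch (hand-written, not AutoRest-generated):
# JobInput/JobOutput each require their type discriminator, and every model here
# inherits msrest's Model.serialize(), which is assumed to emit the camelCase
# wire keys declared in _attribute_map.
def _example_job_io_wire_format() -> tuple:
    job_input = JobInput(job_input_type="UriFile", description="training data")
    job_output = JobOutput(job_output_type="MLFlowModel")
    # Expected shapes: {'jobInputType': 'UriFile', 'description': 'training data'}
    # and {'jobOutputType': 'MLFlowModel'}.
    return job_input.serialize(), job_output.serialize()
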
class JobOutputArtifacts(msrest.serialization.Model):
"""JobOutputArtifacts.
:ivar datastore_id:
:vartype datastore_id: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'datastore_id': {'key': 'datastoreId', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
*,
datastore_id: Optional[str] = None,
path: Optional[str] = None,
**kwargs
):
"""
:keyword datastore_id:
:paramtype datastore_id: str
:keyword path:
:paramtype path: str
"""
super(JobOutputArtifacts, self).__init__(**kwargs)
self.datastore_id = datastore_id
self.path = path
class JobScheduleDto(msrest.serialization.Model):
"""JobScheduleDto.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar name:
:vartype name: str
:ivar job_definition_id:
:vartype job_definition_id: str
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'name': {'key': 'name', 'type': 'str'},
'job_definition_id': {'key': 'jobDefinitionId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
job_type: Optional[Union[str, "JobType"]] = None,
system_data: Optional["SystemData"] = None,
name: Optional[str] = None,
job_definition_id: Optional[str] = None,
display_name: Optional[str] = None,
trigger_type: Optional[Union[str, "TriggerType"]] = None,
recurrence: Optional["Recurrence"] = None,
cron: Optional["Cron"] = None,
status: Optional[Union[str, "ScheduleStatus"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword name:
:paramtype name: str
:keyword job_definition_id:
:paramtype job_definition_id: str
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(JobScheduleDto, self).__init__(**kwargs)
self.job_type = job_type
self.system_data = system_data
self.name = name
self.job_definition_id = job_definition_id
self.display_name = display_name
self.trigger_type = trigger_type
self.recurrence = recurrence
self.cron = cron
self.status = status
self.description = description
self.tags = tags
self.properties = properties
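
# Illustrative usage sketch (hand-written, not AutoRest-generated): an enabled,
# cron-triggered pipeline schedule. The Cron model is defined elsewhere in this
# module; its fields are omitted here rather than guessed at, so only the
# trigger type is set. Name and tags are example values.
def _example_job_schedule() -> "JobScheduleDto":
    return JobScheduleDto(
        job_type="Pipeline",
        name="nightly-run",          # example name
        display_name="Nightly run",
        trigger_type="Cron",         # pair with a Cron instance via `cron`
        status="Enabled",
        tags={"team": "example"},
    )
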
class K8SConfiguration(msrest.serialization.Model):
"""K8SConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
:ivar resource_configuration:
:vartype resource_configuration: ~flow.models.ResourceConfig
:ivar priority_configuration:
:vartype priority_configuration: ~flow.models.PriorityConfig
:ivar interactive_configuration:
:vartype interactive_configuration: ~flow.models.InteractiveConfig
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
'resource_configuration': {'key': 'resourceConfiguration', 'type': 'ResourceConfig'},
'priority_configuration': {'key': 'priorityConfiguration', 'type': 'PriorityConfig'},
'interactive_configuration': {'key': 'interactiveConfiguration', 'type': 'InteractiveConfig'},
}
def __init__(
self,
*,
max_retry_count: Optional[int] = None,
resource_configuration: Optional["ResourceConfig"] = None,
priority_configuration: Optional["PriorityConfig"] = None,
interactive_configuration: Optional["InteractiveConfig"] = None,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
:keyword resource_configuration:
:paramtype resource_configuration: ~flow.models.ResourceConfig
:keyword priority_configuration:
:paramtype priority_configuration: ~flow.models.PriorityConfig
:keyword interactive_configuration:
:paramtype interactive_configuration: ~flow.models.InteractiveConfig
"""
super(K8SConfiguration, self).__init__(**kwargs)
self.max_retry_count = max_retry_count
self.resource_configuration = resource_configuration
self.priority_configuration = priority_configuration
self.interactive_configuration = interactive_configuration
class KeyValuePairComponentNameMetaInfoErrorResponse(msrest.serialization.Model):
"""KeyValuePairComponentNameMetaInfoErrorResponse.
:ivar key:
:vartype key: ~flow.models.ComponentNameMetaInfo
:ivar value: The error response.
:vartype value: ~flow.models.ErrorResponse
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
'value': {'key': 'value', 'type': 'ErrorResponse'},
}
def __init__(
self,
*,
key: Optional["ComponentNameMetaInfo"] = None,
value: Optional["ErrorResponse"] = None,
**kwargs
):
"""
:keyword key:
:paramtype key: ~flow.models.ComponentNameMetaInfo
:keyword value: The error response.
:paramtype value: ~flow.models.ErrorResponse
"""
super(KeyValuePairComponentNameMetaInfoErrorResponse, self).__init__(**kwargs)
self.key = key
self.value = value
class KeyValuePairComponentNameMetaInfoModuleDto(msrest.serialization.Model):
"""KeyValuePairComponentNameMetaInfoModuleDto.
:ivar key:
:vartype key: ~flow.models.ComponentNameMetaInfo
:ivar value:
:vartype value: ~flow.models.ModuleDto
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'ComponentNameMetaInfo'},
'value': {'key': 'value', 'type': 'ModuleDto'},
}
def __init__(
self,
*,
key: Optional["ComponentNameMetaInfo"] = None,
value: Optional["ModuleDto"] = None,
**kwargs
):
"""
:keyword key:
:paramtype key: ~flow.models.ComponentNameMetaInfo
:keyword value:
:paramtype value: ~flow.models.ModuleDto
"""
super(KeyValuePairComponentNameMetaInfoModuleDto, self).__init__(**kwargs)
self.key = key
self.value = value
class KeyValuePairStringObject(msrest.serialization.Model):
"""KeyValuePairStringObject.
:ivar key:
:vartype key: str
:ivar value: Anything.
:vartype value: any
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'object'},
}
def __init__(
self,
*,
key: Optional[str] = None,
value: Optional[Any] = None,
**kwargs
):
"""
:keyword key:
:paramtype key: str
:keyword value: Anything.
:paramtype value: any
"""
super(KeyValuePairStringObject, self).__init__(**kwargs)
self.key = key
self.value = value
class KubernetesConfiguration(msrest.serialization.Model):
"""KubernetesConfiguration.
:ivar instance_type:
:vartype instance_type: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
}
def __init__(
self,
*,
instance_type: Optional[str] = None,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
"""
super(KubernetesConfiguration, self).__init__(**kwargs)
self.instance_type = instance_type
class Kwarg(msrest.serialization.Model):
"""Kwarg.
:ivar key:
:vartype key: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
key: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword key:
:paramtype key: str
:keyword value:
:paramtype value: str
"""
super(Kwarg, self).__init__(**kwargs)
self.key = key
self.value = value
class LegacyDataPath(msrest.serialization.Model):
"""LegacyDataPath.
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar relative_path:
:vartype relative_path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
relative_path: Optional[str] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword relative_path:
:paramtype relative_path: str
"""
super(LegacyDataPath, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.data_store_mode = data_store_mode
self.relative_path = relative_path
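
# Illustrative usage sketch (hand-written, not AutoRest-generated): a datastore-
# relative path mounted at runtime. The datastore name and relative path are
# example values.
def _example_legacy_data_path() -> "LegacyDataPath":
    return LegacyDataPath(
        data_store_name="workspaceblobstore",
        data_store_mode="Mount",  # one of the AEVADataStoreMode values
        relative_path="datasets/train.csv",
    )
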
class LimitSettings(msrest.serialization.Model):
"""LimitSettings.
:ivar max_trials:
:vartype max_trials: int
:ivar timeout:
:vartype timeout: str
:ivar trial_timeout:
:vartype trial_timeout: str
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
:ivar max_cores_per_trial:
:vartype max_cores_per_trial: int
:ivar exit_score:
:vartype exit_score: float
:ivar enable_early_termination:
:vartype enable_early_termination: bool
:ivar max_nodes:
:vartype max_nodes: int
"""
_attribute_map = {
'max_trials': {'key': 'maxTrials', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'str'},
'trial_timeout': {'key': 'trialTimeout', 'type': 'str'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'},
'exit_score': {'key': 'exitScore', 'type': 'float'},
'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'},
'max_nodes': {'key': 'maxNodes', 'type': 'int'},
}
def __init__(
self,
*,
max_trials: Optional[int] = None,
timeout: Optional[str] = None,
trial_timeout: Optional[str] = None,
max_concurrent_trials: Optional[int] = None,
max_cores_per_trial: Optional[int] = None,
exit_score: Optional[float] = None,
enable_early_termination: Optional[bool] = None,
max_nodes: Optional[int] = None,
**kwargs
):
"""
:keyword max_trials:
:paramtype max_trials: int
:keyword timeout:
:paramtype timeout: str
:keyword trial_timeout:
:paramtype trial_timeout: str
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
:keyword max_cores_per_trial:
:paramtype max_cores_per_trial: int
:keyword exit_score:
:paramtype exit_score: float
:keyword enable_early_termination:
:paramtype enable_early_termination: bool
:keyword max_nodes:
:paramtype max_nodes: int
"""
super(LimitSettings, self).__init__(**kwargs)
self.max_trials = max_trials
self.timeout = timeout
self.trial_timeout = trial_timeout
self.max_concurrent_trials = max_concurrent_trials
self.max_cores_per_trial = max_cores_per_trial
self.exit_score = exit_score
self.enable_early_termination = enable_early_termination
self.max_nodes = max_nodes
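
# Illustrative usage sketch (hand-written, not AutoRest-generated): trial limits
# for an automated sweep. The timeout fields are typed as plain strings; ISO-8601
# durations ("PT2H" = two hours) are assumed here, since the generated model does
# not document the expected format.
def _example_limit_settings() -> "LimitSettings":
    return LimitSettings(
        max_trials=20,
        max_concurrent_trials=4,
        timeout="PT2H",         # assumed ISO-8601 duration string
        trial_timeout="PT15M",  # assumed ISO-8601 duration string
        enable_early_termination=True,
    )
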
class LinkedADBWorkspaceMetadata(msrest.serialization.Model):
"""LinkedADBWorkspaceMetadata.
:ivar workspace_id:
:vartype workspace_id: str
:ivar region:
:vartype region: str
"""
_attribute_map = {
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
}
def __init__(
self,
*,
workspace_id: Optional[str] = None,
region: Optional[str] = None,
**kwargs
):
"""
:keyword workspace_id:
:paramtype workspace_id: str
:keyword region:
:paramtype region: str
"""
super(LinkedADBWorkspaceMetadata, self).__init__(**kwargs)
self.workspace_id = workspace_id
self.region = region
class LinkedPipelineInfo(msrest.serialization.Model):
"""LinkedPipelineInfo.
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar module_node_id:
:vartype module_node_id: str
:ivar port_name:
:vartype port_name: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar is_direct_link:
:vartype is_direct_link: bool
"""
_attribute_map = {
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'is_direct_link': {'key': 'isDirectLink', 'type': 'bool'},
}
def __init__(
self,
*,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
module_node_id: Optional[str] = None,
port_name: Optional[str] = None,
linked_pipeline_draft_id: Optional[str] = None,
linked_pipeline_run_id: Optional[str] = None,
is_direct_link: Optional[bool] = None,
**kwargs
):
"""
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword module_node_id:
:paramtype module_node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword is_direct_link:
:paramtype is_direct_link: bool
"""
super(LinkedPipelineInfo, self).__init__(**kwargs)
self.pipeline_type = pipeline_type
self.module_node_id = module_node_id
self.port_name = port_name
self.linked_pipeline_draft_id = linked_pipeline_draft_id
self.linked_pipeline_run_id = linked_pipeline_run_id
self.is_direct_link = is_direct_link
class LoadFlowAsComponentRequest(msrest.serialization.Model):
"""LoadFlowAsComponentRequest.
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_definition_resource_id:
:vartype flow_definition_resource_id: str
:ivar flow_definition_data_store_name:
:vartype flow_definition_data_store_name: str
:ivar flow_definition_blob_path:
:vartype flow_definition_blob_path: str
:ivar flow_definition_data_uri:
:vartype flow_definition_data_uri: str
:ivar node_variant:
:vartype node_variant: str
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar runtime_name:
:vartype runtime_name: str
:ivar session_id:
:vartype session_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
"""
_attribute_map = {
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
'node_variant': {'key': 'nodeVariant', 'type': 'str'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
}
def __init__(
self,
*,
component_name: Optional[str] = None,
component_version: Optional[str] = None,
display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
is_deterministic: Optional[bool] = None,
flow_definition_file_path: Optional[str] = None,
flow_definition_resource_id: Optional[str] = None,
flow_definition_data_store_name: Optional[str] = None,
flow_definition_blob_path: Optional[str] = None,
flow_definition_data_uri: Optional[str] = None,
node_variant: Optional[str] = None,
inputs_mapping: Optional[Dict[str, str]] = None,
connections: Optional[Dict[str, Dict[str, str]]] = None,
environment_variables: Optional[Dict[str, str]] = None,
runtime_name: Optional[str] = None,
session_id: Optional[str] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
**kwargs
):
"""
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_definition_resource_id:
:paramtype flow_definition_resource_id: str
:keyword flow_definition_data_store_name:
:paramtype flow_definition_data_store_name: str
:keyword flow_definition_blob_path:
:paramtype flow_definition_blob_path: str
:keyword flow_definition_data_uri:
:paramtype flow_definition_data_uri: str
:keyword node_variant:
:paramtype node_variant: str
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword runtime_name:
:paramtype runtime_name: str
:keyword session_id:
:paramtype session_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
"""
super(LoadFlowAsComponentRequest, self).__init__(**kwargs)
self.component_name = component_name
self.component_version = component_version
self.display_name = display_name
self.description = description
self.tags = tags
self.properties = properties
self.is_deterministic = is_deterministic
self.flow_definition_file_path = flow_definition_file_path
self.flow_definition_resource_id = flow_definition_resource_id
self.flow_definition_data_store_name = flow_definition_data_store_name
self.flow_definition_blob_path = flow_definition_blob_path
self.flow_definition_data_uri = flow_definition_data_uri
self.node_variant = node_variant
self.inputs_mapping = inputs_mapping
self.connections = connections
self.environment_variables = environment_variables
self.runtime_name = runtime_name
self.session_id = session_id
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
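
# Illustrative usage sketch (hand-written, not AutoRest-generated): registering a
# flow as a named component. Only the fields relevant to the example are set, and
# every value below (paths, variant reference, mappings) is a placeholder.
def _example_load_flow_as_component() -> "LoadFlowAsComponentRequest":
    return LoadFlowAsComponentRequest(
        component_name="my_flow_component",
        component_version="1",
        display_name="My flow as a component",
        flow_definition_file_path="flows/my_flow/flow.dag.yaml",  # placeholder path
        node_variant="${node.variant_a}",  # placeholder variant reference
        inputs_mapping={"question": "${data.question}"},
        environment_variables={"LOG_LEVEL": "INFO"},
        max_idle_time_seconds=3600,
    )
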
class LogRunTerminatedEventDto(msrest.serialization.Model):
"""LogRunTerminatedEventDto.
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:vartype action_type: str or ~flow.models.ActionType
:ivar last_checked_time:
:vartype last_checked_time: ~datetime.datetime
"""
_attribute_map = {
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
'last_checked_time': {'key': 'lastCheckedTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
next_action_interval_in_seconds: Optional[int] = None,
action_type: Optional[Union[str, "ActionType"]] = None,
last_checked_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:paramtype action_type: str or ~flow.models.ActionType
:keyword last_checked_time:
:paramtype last_checked_time: ~datetime.datetime
"""
super(LogRunTerminatedEventDto, self).__init__(**kwargs)
self.next_action_interval_in_seconds = next_action_interval_in_seconds
self.action_type = action_type
self.last_checked_time = last_checked_time
class LongRunningOperationUriResponse(msrest.serialization.Model):
"""LongRunningOperationUriResponse.
:ivar location:
:vartype location: str
:ivar operation_result:
:vartype operation_result: str
"""
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'operation_result': {'key': 'operationResult', 'type': 'str'},
}
def __init__(
self,
*,
location: Optional[str] = None,
operation_result: Optional[str] = None,
**kwargs
):
"""
:keyword location:
:paramtype location: str
:keyword operation_result:
:paramtype operation_result: str
"""
super(LongRunningOperationUriResponse, self).__init__(**kwargs)
self.location = location
self.operation_result = operation_result
class LongRunningUpdateRegistryComponentRequest(msrest.serialization.Model):
"""LongRunningUpdateRegistryComponentRequest.
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar registry_name:
:vartype registry_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar update_type: Possible values include: "EnableModule", "DisableModule",
"UpdateDisplayName", "UpdateDescription", "UpdateTags".
:vartype update_type: str or ~flow.models.LongRunningUpdateType
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'update_type': {'key': 'updateType', 'type': 'str'},
}
def __init__(
self,
*,
display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
registry_name: Optional[str] = None,
component_name: Optional[str] = None,
component_version: Optional[str] = None,
update_type: Optional[Union[str, "LongRunningUpdateType"]] = None,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword registry_name:
:paramtype registry_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword update_type: Possible values include: "EnableModule", "DisableModule",
"UpdateDisplayName", "UpdateDescription", "UpdateTags".
:paramtype update_type: str or ~flow.models.LongRunningUpdateType
"""
super(LongRunningUpdateRegistryComponentRequest, self).__init__(**kwargs)
self.display_name = display_name
self.description = description
self.tags = tags
self.registry_name = registry_name
self.component_name = component_name
self.component_version = component_version
self.update_type = update_type
class ManagedServiceIdentity(msrest.serialization.Model):
"""ManagedServiceIdentity.
All required parameters must be populated in order to send to Azure.
:ivar type: Required. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssignedUserAssigned", "None".
:vartype type: str or ~flow.models.ManagedServiceIdentityType
:ivar principal_id:
:vartype principal_id: str
:ivar tenant_id:
:vartype tenant_id: str
:ivar user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
:vartype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
}
def __init__(
self,
*,
type: Union[str, "ManagedServiceIdentityType"],
principal_id: Optional[str] = None,
tenant_id: Optional[str] = None,
user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None,
**kwargs
):
"""
:keyword type: Required. Possible values include: "SystemAssigned", "UserAssigned",
"SystemAssignedUserAssigned", "None".
:paramtype type: str or ~flow.models.ManagedServiceIdentityType
:keyword principal_id:
:paramtype principal_id: str
:keyword tenant_id:
:paramtype tenant_id: str
:keyword user_assigned_identities: Dictionary of :code:`<UserAssignedIdentity>`.
:paramtype user_assigned_identities: dict[str, ~flow.models.UserAssignedIdentity]
"""
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.type = type
self.principal_id = principal_id
self.tenant_id = tenant_id
self.user_assigned_identities = user_assigned_identities
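
# Illustrative usage sketch (hand-written, not AutoRest-generated): `type` is the
# only required field, so a system-assigned identity needs nothing else. The
# principal and tenant ids are normally populated by the service, not the caller.
def _example_system_assigned_identity() -> "ManagedServiceIdentity":
    return ManagedServiceIdentity(type="SystemAssigned")
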
class MavenLibraryDto(msrest.serialization.Model):
"""MavenLibraryDto.
:ivar coordinates:
:vartype coordinates: str
:ivar repo:
:vartype repo: str
:ivar exclusions:
:vartype exclusions: list[str]
"""
_attribute_map = {
'coordinates': {'key': 'coordinates', 'type': 'str'},
'repo': {'key': 'repo', 'type': 'str'},
'exclusions': {'key': 'exclusions', 'type': '[str]'},
}
def __init__(
self,
*,
coordinates: Optional[str] = None,
repo: Optional[str] = None,
exclusions: Optional[List[str]] = None,
**kwargs
):
"""
:keyword coordinates:
:paramtype coordinates: str
:keyword repo:
:paramtype repo: str
:keyword exclusions:
:paramtype exclusions: list[str]
"""
super(MavenLibraryDto, self).__init__(**kwargs)
self.coordinates = coordinates
self.repo = repo
self.exclusions = exclusions
class MetricProperties(msrest.serialization.Model):
"""MetricProperties.
:ivar ux_metric_type:
:vartype ux_metric_type: str
"""
_attribute_map = {
'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
}
def __init__(
self,
*,
ux_metric_type: Optional[str] = None,
**kwargs
):
"""
:keyword ux_metric_type:
:paramtype ux_metric_type: str
"""
super(MetricProperties, self).__init__(**kwargs)
self.ux_metric_type = ux_metric_type
class MetricSchemaDto(msrest.serialization.Model):
"""MetricSchemaDto.
:ivar num_properties:
:vartype num_properties: int
:ivar properties:
:vartype properties: list[~flow.models.MetricSchemaPropertyDto]
"""
_attribute_map = {
'num_properties': {'key': 'numProperties', 'type': 'int'},
'properties': {'key': 'properties', 'type': '[MetricSchemaPropertyDto]'},
}
def __init__(
self,
*,
num_properties: Optional[int] = None,
properties: Optional[List["MetricSchemaPropertyDto"]] = None,
**kwargs
):
"""
:keyword num_properties:
:paramtype num_properties: int
:keyword properties:
:paramtype properties: list[~flow.models.MetricSchemaPropertyDto]
"""
super(MetricSchemaDto, self).__init__(**kwargs)
self.num_properties = num_properties
self.properties = properties
class MetricSchemaPropertyDto(msrest.serialization.Model):
"""MetricSchemaPropertyDto.
:ivar property_id:
:vartype property_id: str
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'property_id': {'key': 'propertyId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
property_id: Optional[str] = None,
name: Optional[str] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword property_id:
:paramtype property_id: str
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
"""
super(MetricSchemaPropertyDto, self).__init__(**kwargs)
self.property_id = property_id
self.name = name
self.type = type
class MetricV2Dto(msrest.serialization.Model):
"""MetricV2Dto.
:ivar data_container_id:
:vartype data_container_id: str
:ivar name:
:vartype name: str
:ivar columns: This is a dictionary.
:vartype columns: dict[str, str or ~flow.models.MetricValueType]
:ivar properties:
:vartype properties: ~flow.models.MetricProperties
:ivar namespace:
:vartype namespace: str
:ivar standard_schema_id:
:vartype standard_schema_id: str
:ivar value:
:vartype value: list[~flow.models.MetricV2Value]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'columns': {'key': 'columns', 'type': '{str}'},
'properties': {'key': 'properties', 'type': 'MetricProperties'},
'namespace': {'key': 'namespace', 'type': 'str'},
'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
'value': {'key': 'value', 'type': '[MetricV2Value]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
data_container_id: Optional[str] = None,
name: Optional[str] = None,
columns: Optional[Dict[str, Union[str, "MetricValueType"]]] = None,
properties: Optional["MetricProperties"] = None,
namespace: Optional[str] = None,
standard_schema_id: Optional[str] = None,
value: Optional[List["MetricV2Value"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword data_container_id:
:paramtype data_container_id: str
:keyword name:
:paramtype name: str
:keyword columns: This is a dictionary.
:paramtype columns: dict[str, str or ~flow.models.MetricValueType]
:keyword properties:
:paramtype properties: ~flow.models.MetricProperties
:keyword namespace:
:paramtype namespace: str
:keyword standard_schema_id:
:paramtype standard_schema_id: str
:keyword value:
:paramtype value: list[~flow.models.MetricV2Value]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(MetricV2Dto, self).__init__(**kwargs)
self.data_container_id = data_container_id
self.name = name
self.columns = columns
self.properties = properties
self.namespace = namespace
self.standard_schema_id = standard_schema_id
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
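
# Illustrative pagination sketch (hand-written, not AutoRest-generated): draining
# all MetricV2Value pages by following continuation_token until the service
# returns no further token. `fetch_page` is a hypothetical callable standing in
# for whatever operation returns a MetricV2Dto; it is not part of this module.
def _example_collect_metric_values(fetch_page) -> list:
    values = []
    token = None
    while True:
        page = fetch_page(continuation_token=token)  # -> MetricV2Dto
        values.extend(page.value or [])
        token = page.continuation_token
        if token is None:  # no additional pages
            return values
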
class MetricV2Value(msrest.serialization.Model):
"""MetricV2Value.
:ivar metric_id:
:vartype metric_id: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar step:
:vartype step: long
:ivar data: Dictionary of :code:`<any>`.
:vartype data: dict[str, any]
:ivar sas_uri:
:vartype sas_uri: str
"""
_attribute_map = {
'metric_id': {'key': 'metricId', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'step': {'key': 'step', 'type': 'long'},
'data': {'key': 'data', 'type': '{object}'},
'sas_uri': {'key': 'sasUri', 'type': 'str'},
}
def __init__(
self,
*,
metric_id: Optional[str] = None,
created_utc: Optional[datetime.datetime] = None,
step: Optional[int] = None,
data: Optional[Dict[str, Any]] = None,
sas_uri: Optional[str] = None,
**kwargs
):
"""
:keyword metric_id:
:paramtype metric_id: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword step:
:paramtype step: long
:keyword data: Dictionary of :code:`<any>`.
:paramtype data: dict[str, any]
:keyword sas_uri:
:paramtype sas_uri: str
"""
super(MetricV2Value, self).__init__(**kwargs)
self.metric_id = metric_id
self.created_utc = created_utc
self.step = step
self.data = data
self.sas_uri = sas_uri
class MfeInternalAutologgerSettings(msrest.serialization.Model):
"""MfeInternalAutologgerSettings.
:ivar mlflow_autologger: Possible values include: "Enabled", "Disabled".
:vartype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
"""
_attribute_map = {
'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'},
}
def __init__(
self,
*,
mlflow_autologger: Optional[Union[str, "MfeInternalMLFlowAutologgerState"]] = None,
**kwargs
):
"""
:keyword mlflow_autologger: Possible values include: "Enabled", "Disabled".
:paramtype mlflow_autologger: str or ~flow.models.MfeInternalMLFlowAutologgerState
"""
super(MfeInternalAutologgerSettings, self).__init__(**kwargs)
self.mlflow_autologger = mlflow_autologger
class MfeInternalIdentityConfiguration(msrest.serialization.Model):
"""MfeInternalIdentityConfiguration.
:ivar identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
:vartype identity_type: str or ~flow.models.MfeInternalIdentityType
"""
_attribute_map = {
'identity_type': {'key': 'identityType', 'type': 'str'},
}
def __init__(
self,
*,
identity_type: Optional[Union[str, "MfeInternalIdentityType"]] = None,
**kwargs
):
"""
:keyword identity_type: Possible values include: "Managed", "AMLToken", "UserIdentity".
:paramtype identity_type: str or ~flow.models.MfeInternalIdentityType
"""
super(MfeInternalIdentityConfiguration, self).__init__(**kwargs)
self.identity_type = identity_type
class MfeInternalNodes(msrest.serialization.Model):
"""MfeInternalNodes.
:ivar nodes_value_type: The only acceptable values to pass in are None and "All". The default
value is None.
:vartype nodes_value_type: str
"""
_attribute_map = {
'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'},
}
def __init__(
self,
*,
nodes_value_type: Optional[str] = None,
**kwargs
):
"""
:keyword nodes_value_type: The only acceptable values to pass in are None and "All". The
default value is None.
:paramtype nodes_value_type: str
"""
super(MfeInternalNodes, self).__init__(**kwargs)
self.nodes_value_type = nodes_value_type
class MfeInternalOutputData(msrest.serialization.Model):
"""MfeInternalOutputData.
:ivar dataset_name:
:vartype dataset_name: str
:ivar datastore:
:vartype datastore: str
:ivar datapath:
:vartype datapath: str
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
"""
_attribute_map = {
'dataset_name': {'key': 'datasetName', 'type': 'str'},
'datastore': {'key': 'datastore', 'type': 'str'},
'datapath': {'key': 'datapath', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
}
def __init__(
self,
*,
dataset_name: Optional[str] = None,
datastore: Optional[str] = None,
datapath: Optional[str] = None,
mode: Optional[Union[str, "DataBindingMode"]] = None,
**kwargs
):
"""
:keyword dataset_name:
:paramtype dataset_name: str
:keyword datastore:
:paramtype datastore: str
:keyword datapath:
:paramtype datapath: str
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
"""
super(MfeInternalOutputData, self).__init__(**kwargs)
self.dataset_name = dataset_name
self.datastore = datastore
self.datapath = datapath
self.mode = mode
class MfeInternalSecretConfiguration(msrest.serialization.Model):
"""MfeInternalSecretConfiguration.
:ivar workspace_secret_name:
:vartype workspace_secret_name: str
:ivar uri:
:vartype uri: str
"""
_attribute_map = {
'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
}
def __init__(
self,
*,
workspace_secret_name: Optional[str] = None,
uri: Optional[str] = None,
**kwargs
):
"""
:keyword workspace_secret_name:
:paramtype workspace_secret_name: str
:keyword uri:
:paramtype uri: str
"""
super(MfeInternalSecretConfiguration, self).__init__(**kwargs)
self.workspace_secret_name = workspace_secret_name
self.uri = uri
class MfeInternalUriReference(msrest.serialization.Model):
"""MfeInternalUriReference.
:ivar file:
:vartype file: str
:ivar folder:
:vartype folder: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'folder': {'key': 'folder', 'type': 'str'},
}
def __init__(
self,
*,
file: Optional[str] = None,
folder: Optional[str] = None,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword folder:
:paramtype folder: str
"""
super(MfeInternalUriReference, self).__init__(**kwargs)
self.file = file
self.folder = folder
class MfeInternalV20211001ComponentJob(msrest.serialization.Model):
"""MfeInternalV20211001ComponentJob.
:ivar compute_id:
:vartype compute_id: str
:ivar component_id:
:vartype component_id: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.JobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.JobOutput]
:ivar overrides: Anything.
:vartype overrides: any
"""
_attribute_map = {
'compute_id': {'key': 'computeId', 'type': 'str'},
'component_id': {'key': 'componentId', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{JobInput}'},
'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
'overrides': {'key': 'overrides', 'type': 'object'},
}
def __init__(
self,
*,
compute_id: Optional[str] = None,
component_id: Optional[str] = None,
inputs: Optional[Dict[str, "JobInput"]] = None,
outputs: Optional[Dict[str, "JobOutput"]] = None,
overrides: Optional[Any] = None,
**kwargs
):
"""
:keyword compute_id:
:paramtype compute_id: str
:keyword component_id:
:paramtype component_id: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.JobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.JobOutput]
:keyword overrides: Anything.
:paramtype overrides: any
"""
super(MfeInternalV20211001ComponentJob, self).__init__(**kwargs)
self.compute_id = compute_id
self.component_id = component_id
self.inputs = inputs
self.outputs = outputs
self.overrides = overrides
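
# Illustrative usage sketch (hand-written, not AutoRest-generated): wiring the
# JobInput/JobOutput models defined above into a component job. The compute and
# component ids are placeholder ARM-style strings with elided segments.
def _example_component_job() -> "MfeInternalV20211001ComponentJob":
    return MfeInternalV20211001ComponentJob(
        compute_id="/subscriptions/.../computes/cpu-cluster",  # placeholder
        component_id="/subscriptions/.../components/train/versions/1",  # placeholder
        inputs={"training_data": JobInput(job_input_type="UriFolder")},
        outputs={"model": JobOutput(job_output_type="MLFlowModel")},
    )
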
class MinMaxParameterRule(msrest.serialization.Model):
"""MinMaxParameterRule.
:ivar min:
:vartype min: float
:ivar max:
:vartype max: float
"""
_attribute_map = {
'min': {'key': 'min', 'type': 'float'},
'max': {'key': 'max', 'type': 'float'},
}
def __init__(
self,
*,
min: Optional[float] = None,
max: Optional[float] = None,
**kwargs
):
"""
:keyword min:
:paramtype min: float
:keyword max:
:paramtype max: float
"""
super(MinMaxParameterRule, self).__init__(**kwargs)
self.min = min
self.max = max
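
# Illustrative helper sketch (hand-written, not AutoRest-generated): clamping a
# value into the range a MinMaxParameterRule describes, treating a None bound as
# unbounded on that side. The helper name is hypothetical.
def _example_clamp(value: float, rule: "MinMaxParameterRule") -> float:
    if rule.min is not None and value < rule.min:
        return rule.min
    if rule.max is not None and value > rule.max:
        return rule.max
    return value
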
class MlcComputeInfo(msrest.serialization.Model):
"""MlcComputeInfo.
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
"""
_attribute_map = {
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
}
def __init__(
self,
*,
mlc_compute_type: Optional[str] = None,
**kwargs
):
"""
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
"""
super(MlcComputeInfo, self).__init__(**kwargs)
self.mlc_compute_type = mlc_compute_type
class ModelDto(msrest.serialization.Model):
"""ModelDto.
:ivar feed_name:
:vartype feed_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar aml_data_store_name:
:vartype aml_data_store_name: str
:ivar relative_path:
:vartype relative_path: str
:ivar id:
:vartype id: str
:ivar version:
:vartype version: str
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar arm_id:
:vartype arm_id: str
:ivar online_endpoint_yaml_str:
:vartype online_endpoint_yaml_str: str
"""
_attribute_map = {
'feed_name': {'key': 'feedName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'aml_data_store_name': {'key': 'amlDataStoreName', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'arm_id': {'key': 'armId', 'type': 'str'},
'online_endpoint_yaml_str': {'key': 'onlineEndpointYamlStr', 'type': 'str'},
}
def __init__(
self,
*,
feed_name: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
aml_data_store_name: Optional[str] = None,
relative_path: Optional[str] = None,
id: Optional[str] = None,
version: Optional[str] = None,
system_data: Optional["SystemData"] = None,
arm_id: Optional[str] = None,
online_endpoint_yaml_str: Optional[str] = None,
**kwargs
):
"""
:keyword feed_name:
:paramtype feed_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword aml_data_store_name:
:paramtype aml_data_store_name: str
:keyword relative_path:
:paramtype relative_path: str
:keyword id:
:paramtype id: str
:keyword version:
:paramtype version: str
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword arm_id:
:paramtype arm_id: str
:keyword online_endpoint_yaml_str:
:paramtype online_endpoint_yaml_str: str
"""
super(ModelDto, self).__init__(**kwargs)
self.feed_name = feed_name
self.name = name
self.description = description
self.aml_data_store_name = aml_data_store_name
self.relative_path = relative_path
self.id = id
self.version = version
self.system_data = system_data
self.arm_id = arm_id
self.online_endpoint_yaml_str = online_endpoint_yaml_str
class ModelManagementErrorResponse(msrest.serialization.Model):
"""ModelManagementErrorResponse.
:ivar code:
:vartype code: str
:ivar status_code:
:vartype status_code: int
:ivar message:
:vartype message: str
:ivar target:
:vartype target: str
:ivar details:
:vartype details: list[~flow.models.InnerErrorDetails]
:ivar correlation: Dictionary of :code:`<string>`.
:vartype correlation: dict[str, str]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'status_code': {'key': 'statusCode', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[InnerErrorDetails]'},
'correlation': {'key': 'correlation', 'type': '{str}'},
}
def __init__(
self,
*,
code: Optional[str] = None,
status_code: Optional[int] = None,
message: Optional[str] = None,
target: Optional[str] = None,
details: Optional[List["InnerErrorDetails"]] = None,
correlation: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword status_code:
:paramtype status_code: int
:keyword message:
:paramtype message: str
:keyword target:
:paramtype target: str
:keyword details:
:paramtype details: list[~flow.models.InnerErrorDetails]
:keyword correlation: Dictionary of :code:`<string>`.
:paramtype correlation: dict[str, str]
"""
super(ModelManagementErrorResponse, self).__init__(**kwargs)
self.code = code
self.status_code = status_code
self.message = message
self.target = target
self.details = details
self.correlation = correlation
class ModifyPipelineJobScheduleDto(msrest.serialization.Model):
"""ModifyPipelineJobScheduleDto.
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
pipeline_job_name: Optional[str] = None,
pipeline_job_runtime_settings: Optional["PipelineJobRuntimeBasicSettings"] = None,
display_name: Optional[str] = None,
trigger_type: Optional[Union[str, "TriggerType"]] = None,
recurrence: Optional["Recurrence"] = None,
cron: Optional["Cron"] = None,
status: Optional[Union[str, "ScheduleStatus"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(ModifyPipelineJobScheduleDto, self).__init__(**kwargs)
self.pipeline_job_name = pipeline_job_name
self.pipeline_job_runtime_settings = pipeline_job_runtime_settings
self.display_name = display_name
self.trigger_type = trigger_type
self.recurrence = recurrence
self.cron = cron
self.status = status
self.description = description
self.tags = tags
self.properties = properties
class ModuleDto(msrest.serialization.Model):
"""ModuleDto.
:ivar namespace:
:vartype namespace: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar display_name:
:vartype display_name: str
:ivar dict_tags: Dictionary of :code:`<string>`.
:vartype dict_tags: dict[str, str]
:ivar module_version_id:
:vartype module_version_id: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
:ivar module_name:
:vartype module_name: str
:ivar module_version:
:vartype module_version: str
:ivar description:
:vartype description: str
:ivar owner:
:vartype owner: str
:ivar job_type:
:vartype job_type: str
:ivar default_version:
:vartype default_version: str
:ivar family_id:
:vartype family_id: str
:ivar help_document:
:vartype help_document: str
:ivar codegen_by:
:vartype codegen_by: str
:ivar arm_id:
:vartype arm_id: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar input_types:
:vartype input_types: list[str]
:ivar output_types:
:vartype output_types: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar yaml_link:
:vartype yaml_link: str
:ivar yaml_link_with_commit_sha:
:vartype yaml_link_with_commit_sha: str
:ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:vartype module_source_type: str or ~flow.models.ModuleSourceType
:ivar registered_by:
:vartype registered_by: str
:ivar versions:
:vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:ivar is_default_module_version:
:vartype is_default_module_version: bool
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar system_meta:
:vartype system_meta: ~flow.models.SystemMeta
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar entry:
:vartype entry: str
:ivar os_type:
:vartype os_type: str
:ivar require_gpu:
:vartype require_gpu: bool
:ivar module_python_interface:
:vartype module_python_interface: ~flow.models.ModulePythonInterface
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar run_setting_parameters:
:vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar supported_ui_input_data_delivery_modes: This is a dictionary mapping each
 input name to the list of UI input data delivery modes it supports.
:vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:ivar yaml_str:
:vartype yaml_str: str
"""
_attribute_map = {
'namespace': {'key': 'namespace', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'dict_tags': {'key': 'dictTags', 'type': '{str}'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'},
'job_type': {'key': 'jobType', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'family_id': {'key': 'familyId', 'type': 'str'},
'help_document': {'key': 'helpDocument', 'type': 'str'},
'codegen_by': {'key': 'codegenBy', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'input_types': {'key': 'inputTypes', 'type': '[str]'},
'output_types': {'key': 'outputTypes', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'yaml_link': {'key': 'yamlLink', 'type': 'str'},
'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
'yaml_str': {'key': 'yamlStr', 'type': 'str'},
}
def __init__(
self,
*,
namespace: Optional[str] = None,
tags: Optional[List[str]] = None,
display_name: Optional[str] = None,
dict_tags: Optional[Dict[str, str]] = None,
module_version_id: Optional[str] = None,
feed_name: Optional[str] = None,
registry_name: Optional[str] = None,
module_name: Optional[str] = None,
module_version: Optional[str] = None,
description: Optional[str] = None,
owner: Optional[str] = None,
job_type: Optional[str] = None,
default_version: Optional[str] = None,
family_id: Optional[str] = None,
help_document: Optional[str] = None,
codegen_by: Optional[str] = None,
arm_id: Optional[str] = None,
module_scope: Optional[Union[str, "ModuleScope"]] = None,
module_entity: Optional["ModuleEntity"] = None,
input_types: Optional[List[str]] = None,
output_types: Optional[List[str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
yaml_link: Optional[str] = None,
yaml_link_with_commit_sha: Optional[str] = None,
module_source_type: Optional[Union[str, "ModuleSourceType"]] = None,
registered_by: Optional[str] = None,
versions: Optional[List["AzureMLModuleVersionDescriptor"]] = None,
is_default_module_version: Optional[bool] = None,
system_data: Optional["SystemData"] = None,
system_meta: Optional["SystemMeta"] = None,
snapshot_id: Optional[str] = None,
entry: Optional[str] = None,
os_type: Optional[str] = None,
require_gpu: Optional[bool] = None,
module_python_interface: Optional["ModulePythonInterface"] = None,
environment_asset_id: Optional[str] = None,
run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
supported_ui_input_data_delivery_modes: Optional[Dict[str, List[Union[str, "UIInputDataDeliveryMode"]]]] = None,
output_setting_specs: Optional[Dict[str, "OutputSettingSpec"]] = None,
yaml_str: Optional[str] = None,
**kwargs
):
"""
:keyword namespace:
:paramtype namespace: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword display_name:
:paramtype display_name: str
:keyword dict_tags: Dictionary of :code:`<string>`.
:paramtype dict_tags: dict[str, str]
:keyword module_version_id:
:paramtype module_version_id: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
:keyword module_name:
:paramtype module_name: str
:keyword module_version:
:paramtype module_version: str
:keyword description:
:paramtype description: str
:keyword owner:
:paramtype owner: str
:keyword job_type:
:paramtype job_type: str
:keyword default_version:
:paramtype default_version: str
:keyword family_id:
:paramtype family_id: str
:keyword help_document:
:paramtype help_document: str
:keyword codegen_by:
:paramtype codegen_by: str
:keyword arm_id:
:paramtype arm_id: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword input_types:
:paramtype input_types: list[str]
:keyword output_types:
:paramtype output_types: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword yaml_link:
:paramtype yaml_link: str
:keyword yaml_link_with_commit_sha:
:paramtype yaml_link_with_commit_sha: str
:keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:paramtype module_source_type: str or ~flow.models.ModuleSourceType
:keyword registered_by:
:paramtype registered_by: str
:keyword versions:
:paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:keyword is_default_module_version:
:paramtype is_default_module_version: bool
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword system_meta:
:paramtype system_meta: ~flow.models.SystemMeta
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword entry:
:paramtype entry: str
:keyword os_type:
:paramtype os_type: str
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword module_python_interface:
:paramtype module_python_interface: ~flow.models.ModulePythonInterface
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword run_setting_parameters:
:paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword supported_ui_input_data_delivery_modes: Dictionary of lists of
:code:`<UIInputDataDeliveryMode>` values.
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:keyword yaml_str:
:paramtype yaml_str: str
"""
super(ModuleDto, self).__init__(**kwargs)
self.namespace = namespace
self.tags = tags
self.display_name = display_name
self.dict_tags = dict_tags
self.module_version_id = module_version_id
self.feed_name = feed_name
self.registry_name = registry_name
self.module_name = module_name
self.module_version = module_version
self.description = description
self.owner = owner
self.job_type = job_type
self.default_version = default_version
self.family_id = family_id
self.help_document = help_document
self.codegen_by = codegen_by
self.arm_id = arm_id
self.module_scope = module_scope
self.module_entity = module_entity
self.input_types = input_types
self.output_types = output_types
self.entity_status = entity_status
self.created_date = created_date
self.last_modified_date = last_modified_date
self.yaml_link = yaml_link
self.yaml_link_with_commit_sha = yaml_link_with_commit_sha
self.module_source_type = module_source_type
self.registered_by = registered_by
self.versions = versions
self.is_default_module_version = is_default_module_version
self.system_data = system_data
self.system_meta = system_meta
self.snapshot_id = snapshot_id
self.entry = entry
self.os_type = os_type
self.require_gpu = require_gpu
self.module_python_interface = module_python_interface
self.environment_asset_id = environment_asset_id
self.run_setting_parameters = run_setting_parameters
self.supported_ui_input_data_delivery_modes = supported_ui_input_data_delivery_modes
self.output_setting_specs = output_setting_specs
self.yaml_str = yaml_str
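# Editor's note: a minimal usage sketch (not generated by autorest). It relies on
# this module's ModuleDto class and the serialize() helper inherited from
# msrest.serialization.Model; the names and values below are illustrative only.
def _example_module_dto():  # pragma: no cover - illustrative sketch
    dto = ModuleDto(
        module_name="my_module",
        module_version="0.0.1",
        # Values may be plain strings or UIInputDataDeliveryMode enum members.
        supported_ui_input_data_delivery_modes={"input_a": ["Direct"]},
    )
    # serialize() maps snake_case attributes to the wire keys declared in
    # _attribute_map (e.g. module_name -> "moduleName"); None values are omitted.
    return dto.serialize()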
class ModuleDtoWithErrors(msrest.serialization.Model):
"""ModuleDtoWithErrors.
:ivar version_id_to_module_dto: This is a dictionary.
:vartype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
:ivar name_and_version_to_module_dto:
:vartype name_and_version_to_module_dto:
list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
:ivar version_id_to_error: This is a dictionary.
:vartype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
:ivar name_and_version_to_error:
:vartype name_and_version_to_error:
list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
"""
_attribute_map = {
'version_id_to_module_dto': {'key': 'versionIdToModuleDto', 'type': '{ModuleDto}'},
'name_and_version_to_module_dto': {'key': 'nameAndVersionToModuleDto', 'type': '[KeyValuePairComponentNameMetaInfoModuleDto]'},
'version_id_to_error': {'key': 'versionIdToError', 'type': '{ErrorResponse}'},
'name_and_version_to_error': {'key': 'nameAndVersionToError', 'type': '[KeyValuePairComponentNameMetaInfoErrorResponse]'},
}
def __init__(
self,
*,
version_id_to_module_dto: Optional[Dict[str, "ModuleDto"]] = None,
name_and_version_to_module_dto: Optional[List["KeyValuePairComponentNameMetaInfoModuleDto"]] = None,
version_id_to_error: Optional[Dict[str, "ErrorResponse"]] = None,
name_and_version_to_error: Optional[List["KeyValuePairComponentNameMetaInfoErrorResponse"]] = None,
**kwargs
):
"""
:keyword version_id_to_module_dto: This is a dictionary.
:paramtype version_id_to_module_dto: dict[str, ~flow.models.ModuleDto]
:keyword name_and_version_to_module_dto:
:paramtype name_and_version_to_module_dto:
list[~flow.models.KeyValuePairComponentNameMetaInfoModuleDto]
:keyword version_id_to_error: This is a dictionary.
:paramtype version_id_to_error: dict[str, ~flow.models.ErrorResponse]
:keyword name_and_version_to_error:
:paramtype name_and_version_to_error:
list[~flow.models.KeyValuePairComponentNameMetaInfoErrorResponse]
"""
super(ModuleDtoWithErrors, self).__init__(**kwargs)
self.version_id_to_module_dto = version_id_to_module_dto
self.name_and_version_to_module_dto = name_and_version_to_module_dto
self.version_id_to_error = version_id_to_error
self.name_and_version_to_error = name_and_version_to_error
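# Editor's note: an illustrative sketch (not generated code) showing how the
# per-version dictionaries pair results with failures. The version ids are
# hypothetical, and ErrorResponse is assumed to accept no required arguments.
def _example_module_dto_with_errors():  # pragma: no cover - illustrative sketch
    result = ModuleDtoWithErrors(
        version_id_to_module_dto={"version-1": ModuleDto(module_name="ok_module")},
        version_id_to_error={"version-2": ErrorResponse()},
    )
    # Callers would typically check version_id_to_error before trusting the
    # version_id_to_module_dto entry for the same version id.
    return result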
class ModuleDtoWithValidateStatus(msrest.serialization.Model):
"""ModuleDtoWithValidateStatus.
:ivar existing_module_entity:
:vartype existing_module_entity: ~flow.models.ModuleEntity
:ivar status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
"ProcessRequestError".
:vartype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
:ivar status_details:
:vartype status_details: str
:ivar error_details:
:vartype error_details: list[str]
:ivar serialized_module_info:
:vartype serialized_module_info: str
:ivar namespace:
:vartype namespace: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar display_name:
:vartype display_name: str
:ivar dict_tags: Dictionary of :code:`<string>`.
:vartype dict_tags: dict[str, str]
:ivar module_version_id:
:vartype module_version_id: str
:ivar feed_name:
:vartype feed_name: str
:ivar registry_name:
:vartype registry_name: str
:ivar module_name:
:vartype module_name: str
:ivar module_version:
:vartype module_version: str
:ivar description:
:vartype description: str
:ivar owner:
:vartype owner: str
:ivar job_type:
:vartype job_type: str
:ivar default_version:
:vartype default_version: str
:ivar family_id:
:vartype family_id: str
:ivar help_document:
:vartype help_document: str
:ivar codegen_by:
:vartype codegen_by: str
:ivar arm_id:
:vartype arm_id: str
:ivar module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous", "Step",
"Draft", "Feed", "Registry", "SystemAutoCreated".
:vartype module_scope: str or ~flow.models.ModuleScope
:ivar module_entity:
:vartype module_entity: ~flow.models.ModuleEntity
:ivar input_types:
:vartype input_types: list[str]
:ivar output_types:
:vartype output_types: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar yaml_link:
:vartype yaml_link: str
:ivar yaml_link_with_commit_sha:
:vartype yaml_link_with_commit_sha: str
:ivar module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:vartype module_source_type: str or ~flow.models.ModuleSourceType
:ivar registered_by:
:vartype registered_by: str
:ivar versions:
:vartype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:ivar is_default_module_version:
:vartype is_default_module_version: bool
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar system_meta:
:vartype system_meta: ~flow.models.SystemMeta
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar entry:
:vartype entry: str
:ivar os_type:
:vartype os_type: str
:ivar require_gpu:
:vartype require_gpu: bool
:ivar module_python_interface:
:vartype module_python_interface: ~flow.models.ModulePythonInterface
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar run_setting_parameters:
:vartype run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar supported_ui_input_data_delivery_modes: Dictionary of lists of
:code:`<UIInputDataDeliveryMode>` values.
:vartype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:ivar output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:vartype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:ivar yaml_str:
:vartype yaml_str: str
"""
_attribute_map = {
'existing_module_entity': {'key': 'existingModuleEntity', 'type': 'ModuleEntity'},
'status': {'key': 'status', 'type': 'str'},
'status_details': {'key': 'statusDetails', 'type': 'str'},
'error_details': {'key': 'errorDetails', 'type': '[str]'},
'serialized_module_info': {'key': 'serializedModuleInfo', 'type': 'str'},
'namespace': {'key': 'namespace', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'display_name': {'key': 'displayName', 'type': 'str'},
'dict_tags': {'key': 'dictTags', 'type': '{str}'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'registry_name': {'key': 'registryName', 'type': 'str'},
'module_name': {'key': 'moduleName', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'str'},
'job_type': {'key': 'jobType', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'family_id': {'key': 'familyId', 'type': 'str'},
'help_document': {'key': 'helpDocument', 'type': 'str'},
'codegen_by': {'key': 'codegenBy', 'type': 'str'},
'arm_id': {'key': 'armId', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_entity': {'key': 'moduleEntity', 'type': 'ModuleEntity'},
'input_types': {'key': 'inputTypes', 'type': '[str]'},
'output_types': {'key': 'outputTypes', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'yaml_link': {'key': 'yamlLink', 'type': 'str'},
'yaml_link_with_commit_sha': {'key': 'yamlLinkWithCommitSha', 'type': 'str'},
'module_source_type': {'key': 'moduleSourceType', 'type': 'str'},
'registered_by': {'key': 'registeredBy', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[AzureMLModuleVersionDescriptor]'},
'is_default_module_version': {'key': 'isDefaultModuleVersion', 'type': 'bool'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'system_meta': {'key': 'systemMeta', 'type': 'SystemMeta'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'module_python_interface': {'key': 'modulePythonInterface', 'type': 'ModulePythonInterface'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'run_setting_parameters': {'key': 'runSettingParameters', 'type': '[RunSettingParameter]'},
'supported_ui_input_data_delivery_modes': {'key': 'supportedUIInputDataDeliveryModes', 'type': '{[str]}'},
'output_setting_specs': {'key': 'outputSettingSpecs', 'type': '{OutputSettingSpec}'},
'yaml_str': {'key': 'yamlStr', 'type': 'str'},
}
def __init__(
self,
*,
existing_module_entity: Optional["ModuleEntity"] = None,
status: Optional[Union[str, "ModuleInfoFromYamlStatusEnum"]] = None,
status_details: Optional[str] = None,
error_details: Optional[List[str]] = None,
serialized_module_info: Optional[str] = None,
namespace: Optional[str] = None,
tags: Optional[List[str]] = None,
display_name: Optional[str] = None,
dict_tags: Optional[Dict[str, str]] = None,
module_version_id: Optional[str] = None,
feed_name: Optional[str] = None,
registry_name: Optional[str] = None,
module_name: Optional[str] = None,
module_version: Optional[str] = None,
description: Optional[str] = None,
owner: Optional[str] = None,
job_type: Optional[str] = None,
default_version: Optional[str] = None,
family_id: Optional[str] = None,
help_document: Optional[str] = None,
codegen_by: Optional[str] = None,
arm_id: Optional[str] = None,
module_scope: Optional[Union[str, "ModuleScope"]] = None,
module_entity: Optional["ModuleEntity"] = None,
input_types: Optional[List[str]] = None,
output_types: Optional[List[str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
yaml_link: Optional[str] = None,
yaml_link_with_commit_sha: Optional[str] = None,
module_source_type: Optional[Union[str, "ModuleSourceType"]] = None,
registered_by: Optional[str] = None,
versions: Optional[List["AzureMLModuleVersionDescriptor"]] = None,
is_default_module_version: Optional[bool] = None,
system_data: Optional["SystemData"] = None,
system_meta: Optional["SystemMeta"] = None,
snapshot_id: Optional[str] = None,
entry: Optional[str] = None,
os_type: Optional[str] = None,
require_gpu: Optional[bool] = None,
module_python_interface: Optional["ModulePythonInterface"] = None,
environment_asset_id: Optional[str] = None,
run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
supported_ui_input_data_delivery_modes: Optional[Dict[str, List[Union[str, "UIInputDataDeliveryMode"]]]] = None,
output_setting_specs: Optional[Dict[str, "OutputSettingSpec"]] = None,
yaml_str: Optional[str] = None,
**kwargs
):
"""
:keyword existing_module_entity:
:paramtype existing_module_entity: ~flow.models.ModuleEntity
:keyword status: Possible values include: "NewModule", "NewVersion", "Conflict", "ParseError",
"ProcessRequestError".
:paramtype status: str or ~flow.models.ModuleInfoFromYamlStatusEnum
:keyword status_details:
:paramtype status_details: str
:keyword error_details:
:paramtype error_details: list[str]
:keyword serialized_module_info:
:paramtype serialized_module_info: str
:keyword namespace:
:paramtype namespace: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword display_name:
:paramtype display_name: str
:keyword dict_tags: Dictionary of :code:`<string>`.
:paramtype dict_tags: dict[str, str]
:keyword module_version_id:
:paramtype module_version_id: str
:keyword feed_name:
:paramtype feed_name: str
:keyword registry_name:
:paramtype registry_name: str
:keyword module_name:
:paramtype module_name: str
:keyword module_version:
:paramtype module_version: str
:keyword description:
:paramtype description: str
:keyword owner:
:paramtype owner: str
:keyword job_type:
:paramtype job_type: str
:keyword default_version:
:paramtype default_version: str
:keyword family_id:
:paramtype family_id: str
:keyword help_document:
:paramtype help_document: str
:keyword codegen_by:
:paramtype codegen_by: str
:keyword arm_id:
:paramtype arm_id: str
:keyword module_scope: Possible values include: "All", "Global", "Workspace", "Anonymous",
"Step", "Draft", "Feed", "Registry", "SystemAutoCreated".
:paramtype module_scope: str or ~flow.models.ModuleScope
:keyword module_entity:
:paramtype module_entity: ~flow.models.ModuleEntity
:keyword input_types:
:paramtype input_types: list[str]
:keyword output_types:
:paramtype output_types: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword yaml_link:
:paramtype yaml_link: str
:keyword yaml_link_with_commit_sha:
:paramtype yaml_link_with_commit_sha: str
:keyword module_source_type: Possible values include: "Unknown", "Local", "GithubFile",
"GithubFolder", "DevopsArtifactsZip", "SerializedModuleInfo".
:paramtype module_source_type: str or ~flow.models.ModuleSourceType
:keyword registered_by:
:paramtype registered_by: str
:keyword versions:
:paramtype versions: list[~flow.models.AzureMLModuleVersionDescriptor]
:keyword is_default_module_version:
:paramtype is_default_module_version: bool
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword system_meta:
:paramtype system_meta: ~flow.models.SystemMeta
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword entry:
:paramtype entry: str
:keyword os_type:
:paramtype os_type: str
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword module_python_interface:
:paramtype module_python_interface: ~flow.models.ModulePythonInterface
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword run_setting_parameters:
:paramtype run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword supported_ui_input_data_delivery_modes: Dictionary of lists of
:code:`<UIInputDataDeliveryMode>` values.
:paramtype supported_ui_input_data_delivery_modes: dict[str, list[str or
~flow.models.UIInputDataDeliveryMode]]
:keyword output_setting_specs: Dictionary of :code:`<OutputSettingSpec>`.
:paramtype output_setting_specs: dict[str, ~flow.models.OutputSettingSpec]
:keyword yaml_str:
:paramtype yaml_str: str
"""
super(ModuleDtoWithValidateStatus, self).__init__(**kwargs)
self.existing_module_entity = existing_module_entity
self.status = status
self.status_details = status_details
self.error_details = error_details
self.serialized_module_info = serialized_module_info
self.namespace = namespace
self.tags = tags
self.display_name = display_name
self.dict_tags = dict_tags
self.module_version_id = module_version_id
self.feed_name = feed_name
self.registry_name = registry_name
self.module_name = module_name
self.module_version = module_version
self.description = description
self.owner = owner
self.job_type = job_type
self.default_version = default_version
self.family_id = family_id
self.help_document = help_document
self.codegen_by = codegen_by
self.arm_id = arm_id
self.module_scope = module_scope
self.module_entity = module_entity
self.input_types = input_types
self.output_types = output_types
self.entity_status = entity_status
self.created_date = created_date
self.last_modified_date = last_modified_date
self.yaml_link = yaml_link
self.yaml_link_with_commit_sha = yaml_link_with_commit_sha
self.module_source_type = module_source_type
self.registered_by = registered_by
self.versions = versions
self.is_default_module_version = is_default_module_version
self.system_data = system_data
self.system_meta = system_meta
self.snapshot_id = snapshot_id
self.entry = entry
self.os_type = os_type
self.require_gpu = require_gpu
self.module_python_interface = module_python_interface
self.environment_asset_id = environment_asset_id
self.run_setting_parameters = run_setting_parameters
self.supported_ui_input_data_delivery_modes = supported_ui_input_data_delivery_modes
self.output_setting_specs = output_setting_specs
self.yaml_str = yaml_str
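# Editor's note: illustrative sketch (not generated). Because the 'status'
# attribute is declared as 'str' in _attribute_map, deserialized instances carry
# the string form of ModuleInfoFromYamlStatusEnum; this shows a typical check.
def _example_validate_status_check(validated):  # pragma: no cover - illustrative sketch
    # 'validated' is assumed to be a ModuleDtoWithValidateStatus instance.
    if validated.status in ("ParseError", "ProcessRequestError"):
        # error_details is an optional list[str] populated on failure.
        return validated.error_details or []
    return []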
class ModuleEntity(msrest.serialization.Model):
"""ModuleEntity.
:ivar display_name:
:vartype display_name: str
:ivar module_execution_type:
:vartype module_execution_type: str
:ivar module_type: Possible values include: "None", "BatchInferencing".
:vartype module_type: str or ~flow.models.ModuleType
:ivar module_type_version:
:vartype module_type_version: str
:ivar upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:vartype upload_state: str or ~flow.models.UploadState
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar structured_interface:
:vartype structured_interface: ~flow.models.StructuredInterface
:ivar data_location:
:vartype data_location: ~flow.models.DataLocation
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar runconfig:
:vartype runconfig: str
:ivar cloud_settings:
:vartype cloud_settings: ~flow.models.CloudSettings
:ivar category:
:vartype category: str
:ivar step_type:
:vartype step_type: str
:ivar stage:
:vartype stage: str
:ivar name:
:vartype name: str
:ivar hash:
:vartype hash: str
:ivar description:
:vartype description: str
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'module_execution_type': {'key': 'moduleExecutionType', 'type': 'str'},
'module_type': {'key': 'moduleType', 'type': 'str'},
'module_type_version': {'key': 'moduleTypeVersion', 'type': 'str'},
'upload_state': {'key': 'uploadState', 'type': 'str'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'structured_interface': {'key': 'structuredInterface', 'type': 'StructuredInterface'},
'data_location': {'key': 'dataLocation', 'type': 'DataLocation'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'identifierHashV2', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'runconfig': {'key': 'runconfig', 'type': 'str'},
'cloud_settings': {'key': 'cloudSettings', 'type': 'CloudSettings'},
'category': {'key': 'category', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'stage': {'key': 'stage', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'hash': {'key': 'hash', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
display_name: Optional[str] = None,
module_execution_type: Optional[str] = None,
module_type: Optional[Union[str, "ModuleType"]] = None,
module_type_version: Optional[str] = None,
upload_state: Optional[Union[str, "UploadState"]] = None,
is_deterministic: Optional[bool] = None,
structured_interface: Optional["StructuredInterface"] = None,
data_location: Optional["DataLocation"] = None,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
created_by: Optional["CreatedBy"] = None,
last_updated_by: Optional["CreatedBy"] = None,
runconfig: Optional[str] = None,
cloud_settings: Optional["CloudSettings"] = None,
category: Optional[str] = None,
step_type: Optional[str] = None,
stage: Optional[str] = None,
name: Optional[str] = None,
hash: Optional[str] = None,
description: Optional[str] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword module_execution_type:
:paramtype module_execution_type: str
:keyword module_type: Possible values include: "None", "BatchInferencing".
:paramtype module_type: str or ~flow.models.ModuleType
:keyword module_type_version:
:paramtype module_type_version: str
:keyword upload_state: Possible values include: "Uploading", "Completed", "Canceled", "Failed".
:paramtype upload_state: str or ~flow.models.UploadState
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword structured_interface:
:paramtype structured_interface: ~flow.models.StructuredInterface
:keyword data_location:
:paramtype data_location: ~flow.models.DataLocation
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword runconfig:
:paramtype runconfig: str
:keyword cloud_settings:
:paramtype cloud_settings: ~flow.models.CloudSettings
:keyword category:
:paramtype category: str
:keyword step_type:
:paramtype step_type: str
:keyword stage:
:paramtype stage: str
:keyword name:
:paramtype name: str
:keyword hash:
:paramtype hash: str
:keyword description:
:paramtype description: str
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(ModuleEntity, self).__init__(**kwargs)
self.display_name = display_name
self.module_execution_type = module_execution_type
self.module_type = module_type
self.module_type_version = module_type_version
self.upload_state = upload_state
self.is_deterministic = is_deterministic
self.structured_interface = structured_interface
self.data_location = data_location
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
self.tags = tags
self.properties = properties
self.created_by = created_by
self.last_updated_by = last_updated_by
self.runconfig = runconfig
self.cloud_settings = cloud_settings
self.category = category
self.step_type = step_type
self.stage = stage
self.name = name
self.hash = hash
self.description = description
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
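# Editor's note: illustrative sketch (not generated). ModuleEntity tags and
# properties are plain str->str dictionaries on the wire; values are made up.
def _example_module_entity():  # pragma: no cover - illustrative sketch
    entity = ModuleEntity(
        name="my_module",
        module_type="None",  # the literal "None" enum value, not Python None
        is_deterministic=True,
        tags={"team": "flows"},
        properties={"source": "yaml"},
    )
    return entity.serialize()  # camelCase wire keys per _attribute_map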
class ModulePythonInterface(msrest.serialization.Model):
"""ModulePythonInterface.
:ivar inputs:
:vartype inputs: list[~flow.models.PythonInterfaceMapping]
:ivar outputs:
:vartype outputs: list[~flow.models.PythonInterfaceMapping]
:ivar parameters:
:vartype parameters: list[~flow.models.PythonInterfaceMapping]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '[PythonInterfaceMapping]'},
'outputs': {'key': 'outputs', 'type': '[PythonInterfaceMapping]'},
'parameters': {'key': 'parameters', 'type': '[PythonInterfaceMapping]'},
}
def __init__(
self,
*,
inputs: Optional[List["PythonInterfaceMapping"]] = None,
outputs: Optional[List["PythonInterfaceMapping"]] = None,
parameters: Optional[List["PythonInterfaceMapping"]] = None,
**kwargs
):
"""
:keyword inputs:
:paramtype inputs: list[~flow.models.PythonInterfaceMapping]
:keyword outputs:
:paramtype outputs: list[~flow.models.PythonInterfaceMapping]
:keyword parameters:
:paramtype parameters: list[~flow.models.PythonInterfaceMapping]
"""
super(ModulePythonInterface, self).__init__(**kwargs)
self.inputs = inputs
self.outputs = outputs
self.parameters = parameters
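# Editor's note: illustrative sketch (not generated). PythonInterfaceMapping is
# defined elsewhere in this module; its construction is out of scope here, so
# pre-built instances are taken as parameters.
def _example_module_python_interface(input_mapping, output_mapping):  # pragma: no cover
    # input_mapping / output_mapping are assumed PythonInterfaceMapping instances.
    return ModulePythonInterface(
        inputs=[input_mapping],
        outputs=[output_mapping],
        parameters=[],
    )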
class MpiConfiguration(msrest.serialization.Model):
"""MpiConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
*,
process_count_per_node: Optional[int] = None,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(MpiConfiguration, self).__init__(**kwargs)
self.process_count_per_node = process_count_per_node
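# Editor's note: illustrative round-trip sketch (not generated) using the
# serialize()/deserialize() helpers inherited from msrest.serialization.Model.
def _example_mpi_configuration():  # pragma: no cover - illustrative sketch
    wire = MpiConfiguration(process_count_per_node=2).serialize()
    # 'wire' uses the _attribute_map key: {'processCountPerNode': 2}.
    return MpiConfiguration.deserialize(wire)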
class NCrossValidations(msrest.serialization.Model):
"""NCrossValidations.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.NCrossValidationMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "NCrossValidationMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.NCrossValidationMode
:keyword value:
:paramtype value: int
"""
super(NCrossValidations, self).__init__(**kwargs)
self.mode = mode
self.value = value
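# Editor's note: illustrative sketch (not generated). 'mode' accepts either an
# NCrossValidationMode enum member or its string value.
def _example_n_cross_validations():  # pragma: no cover - illustrative sketch
    # "Custom" pairs with an explicit fold count; "Auto" would leave value unset.
    return NCrossValidations(mode="Custom", value=5)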
class Node(msrest.serialization.Model):
"""Node.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:vartype type: str or ~flow.models.ToolType
:ivar source:
:vartype source: ~flow.models.NodeSource
:ivar inputs: Dictionary of :code:`<any>`.
:vartype inputs: dict[str, any]
:ivar tool:
:vartype tool: str
:ivar reduce:
:vartype reduce: bool
:ivar activate:
:vartype activate: ~flow.models.Activate
:ivar comment:
:vartype comment: str
:ivar api:
:vartype api: str
:ivar provider:
:vartype provider: str
:ivar connection:
:vartype connection: str
:ivar module:
:vartype module: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'source': {'key': 'source', 'type': 'NodeSource'},
'inputs': {'key': 'inputs', 'type': '{object}'},
'tool': {'key': 'tool', 'type': 'str'},
'reduce': {'key': 'reduce', 'type': 'bool'},
'activate': {'key': 'activate', 'type': 'Activate'},
'comment': {'key': 'comment', 'type': 'str'},
'api': {'key': 'api', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'connection': {'key': 'connection', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[Union[str, "ToolType"]] = None,
source: Optional["NodeSource"] = None,
inputs: Optional[Dict[str, Any]] = None,
tool: Optional[str] = None,
reduce: Optional[bool] = None,
activate: Optional["Activate"] = None,
comment: Optional[str] = None,
api: Optional[str] = None,
provider: Optional[str] = None,
connection: Optional[str] = None,
module: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:paramtype type: str or ~flow.models.ToolType
:keyword source:
:paramtype source: ~flow.models.NodeSource
:keyword inputs: Dictionary of :code:`<any>`.
:paramtype inputs: dict[str, any]
:keyword tool:
:paramtype tool: str
:keyword reduce:
:paramtype reduce: bool
:keyword activate:
:paramtype activate: ~flow.models.Activate
:keyword comment:
:paramtype comment: str
:keyword api:
:paramtype api: str
:keyword provider:
:paramtype provider: str
:keyword connection:
:paramtype connection: str
:keyword module:
:paramtype module: str
"""
super(Node, self).__init__(**kwargs)
self.name = name
self.type = type
self.source = source
self.inputs = inputs
self.tool = tool
self.reduce = reduce
self.activate = activate
self.comment = comment
self.api = api
self.provider = provider
self.connection = connection
self.module = module
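# Editor's note: illustrative sketch (not generated) of a flow graph node. The
# node name, source type/path, and input binding are hypothetical; 'inputs' is a
# free-form dict of node input values.
def _example_python_node():  # pragma: no cover - illustrative sketch
    return Node(
        name="summarize",
        type="python",  # one of the ToolType values listed above
        source=NodeSource(type="code", path="summarize.py"),
        inputs={"text": "${inputs.question}"},
    )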
class NodeInputPort(msrest.serialization.Model):
"""NodeInputPort.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar data_types_ids:
:vartype data_types_ids: list[str]
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'data_types_ids': {'key': 'dataTypesIds', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
documentation: Optional[str] = None,
data_types_ids: Optional[List[str]] = None,
is_optional: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword data_types_ids:
:paramtype data_types_ids: list[str]
:keyword is_optional:
:paramtype is_optional: bool
"""
super(NodeInputPort, self).__init__(**kwargs)
self.name = name
self.documentation = documentation
self.data_types_ids = data_types_ids
self.is_optional = is_optional
class NodeLayout(msrest.serialization.Model):
"""NodeLayout.
:ivar x:
:vartype x: float
:ivar y:
:vartype y: float
:ivar width:
:vartype width: float
:ivar height:
:vartype height: float
:ivar extended_data:
:vartype extended_data: str
"""
_attribute_map = {
'x': {'key': 'x', 'type': 'float'},
'y': {'key': 'y', 'type': 'float'},
'width': {'key': 'width', 'type': 'float'},
'height': {'key': 'height', 'type': 'float'},
'extended_data': {'key': 'extendedData', 'type': 'str'},
}
def __init__(
self,
*,
x: Optional[float] = None,
y: Optional[float] = None,
width: Optional[float] = None,
height: Optional[float] = None,
extended_data: Optional[str] = None,
**kwargs
):
"""
:keyword x:
:paramtype x: float
:keyword y:
:paramtype y: float
:keyword width:
:paramtype width: float
:keyword height:
:paramtype height: float
:keyword extended_data:
:paramtype extended_data: str
"""
super(NodeLayout, self).__init__(**kwargs)
self.x = x
self.y = y
self.width = width
self.height = height
self.extended_data = extended_data
class NodeOutputPort(msrest.serialization.Model):
"""NodeOutputPort.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_input_name:
:vartype pass_through_input_name: str
:ivar early_available:
:vartype early_available: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_input_name': {'key': 'passThroughInputName', 'type': 'str'},
'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
documentation: Optional[str] = None,
data_type_id: Optional[str] = None,
pass_through_input_name: Optional[str] = None,
early_available: Optional[bool] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_input_name:
:paramtype pass_through_input_name: str
:keyword early_available:
:paramtype early_available: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
"""
super(NodeOutputPort, self).__init__(**kwargs)
self.name = name
self.documentation = documentation
self.data_type_id = data_type_id
self.pass_through_input_name = pass_through_input_name
self.early_available = early_available
self.data_store_mode = data_store_mode
class NodePortInterface(msrest.serialization.Model):
"""NodePortInterface.
:ivar inputs:
:vartype inputs: list[~flow.models.NodeInputPort]
:ivar outputs:
:vartype outputs: list[~flow.models.NodeOutputPort]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.ControlOutput]
"""
_attribute_map = {
'inputs': {'key': 'inputs', 'type': '[NodeInputPort]'},
'outputs': {'key': 'outputs', 'type': '[NodeOutputPort]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
}
def __init__(
self,
*,
inputs: Optional[List["NodeInputPort"]] = None,
outputs: Optional[List["NodeOutputPort"]] = None,
control_outputs: Optional[List["ControlOutput"]] = None,
**kwargs
):
"""
:keyword inputs:
:paramtype inputs: list[~flow.models.NodeInputPort]
:keyword outputs:
:paramtype outputs: list[~flow.models.NodeOutputPort]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.ControlOutput]
"""
super(NodePortInterface, self).__init__(**kwargs)
self.inputs = inputs
self.outputs = outputs
self.control_outputs = control_outputs
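# Editor's note: illustrative sketch (not generated) wiring input and output
# port descriptors into a NodePortInterface; names and data type ids are made up.
def _example_node_port_interface():  # pragma: no cover - illustrative sketch
    return NodePortInterface(
        inputs=[NodeInputPort(name="data", data_types_ids=["AnyFile"], is_optional=False)],
        outputs=[NodeOutputPort(name="result", data_type_id="AnyFile")],
    )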
class Nodes(msrest.serialization.Model):
"""Nodes.
All required parameters must be populated in order to send to Azure.
:ivar nodes_value_type: Required. Possible values include: "All", "Custom".
:vartype nodes_value_type: str or ~flow.models.NodesValueType
:ivar values:
:vartype values: list[int]
"""
_validation = {
'nodes_value_type': {'required': True},
}
_attribute_map = {
'nodes_value_type': {'key': 'nodes_value_type', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
*,
nodes_value_type: Union[str, "NodesValueType"],
values: Optional[List[int]] = None,
**kwargs
):
"""
:keyword nodes_value_type: Required. Possible values include: "All", "Custom".
:paramtype nodes_value_type: str or ~flow.models.NodesValueType
:keyword values:
:paramtype values: list[int]
"""
super(Nodes, self).__init__(**kwargs)
self.nodes_value_type = nodes_value_type
self.values = values
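# Editor's note: illustrative sketch (not generated). nodes_value_type is the
# only required field; "Custom" pairs with an explicit list of node indices.
def _example_nodes_selection():  # pragma: no cover - illustrative sketch
    all_nodes = Nodes(nodes_value_type="All")
    some_nodes = Nodes(nodes_value_type="Custom", values=[0, 2, 3])
    return all_nodes, some_nodes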
class NodeSource(msrest.serialization.Model):
"""NodeSource.
:ivar type:
:vartype type: str
:ivar tool:
:vartype tool: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'tool': {'key': 'tool', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[str] = None,
tool: Optional[str] = None,
path: Optional[str] = None,
**kwargs
):
"""
:keyword type:
:paramtype type: str
:keyword tool:
:paramtype tool: str
:keyword path:
:paramtype path: str
"""
super(NodeSource, self).__init__(**kwargs)
self.type = type
self.tool = tool
self.path = path
class NodeTelemetryMetaInfo(msrest.serialization.Model):
"""NodeTelemetryMetaInfo.
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar node_id:
:vartype node_id: str
:ivar version_id:
:vartype version_id: str
:ivar node_type:
:vartype node_type: str
:ivar node_source:
:vartype node_source: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar is_pipeline_component:
:vartype is_pipeline_component: bool
"""
_attribute_map = {
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'version_id': {'key': 'versionId', 'type': 'str'},
'node_type': {'key': 'nodeType', 'type': 'str'},
'node_source': {'key': 'nodeSource', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'is_pipeline_component': {'key': 'isPipelineComponent', 'type': 'bool'},
}
def __init__(
self,
*,
pipeline_run_id: Optional[str] = None,
node_id: Optional[str] = None,
version_id: Optional[str] = None,
node_type: Optional[str] = None,
node_source: Optional[str] = None,
is_anonymous: Optional[bool] = None,
is_pipeline_component: Optional[bool] = None,
**kwargs
):
"""
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword version_id:
:paramtype version_id: str
:keyword node_type:
:paramtype node_type: str
:keyword node_source:
:paramtype node_source: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword is_pipeline_component:
:paramtype is_pipeline_component: bool
"""
super(NodeTelemetryMetaInfo, self).__init__(**kwargs)
self.pipeline_run_id = pipeline_run_id
self.node_id = node_id
self.version_id = version_id
self.node_type = node_type
self.node_source = node_source
self.is_anonymous = is_anonymous
self.is_pipeline_component = is_pipeline_component
class NodeVariant(msrest.serialization.Model):
"""NodeVariant.
:ivar variants: This is a dictionary.
:vartype variants: dict[str, ~flow.models.VariantNode]
:ivar default_variant_id:
:vartype default_variant_id: str
"""
_attribute_map = {
'variants': {'key': 'variants', 'type': '{VariantNode}'},
'default_variant_id': {'key': 'defaultVariantId', 'type': 'str'},
}
def __init__(
self,
*,
variants: Optional[Dict[str, "VariantNode"]] = None,
default_variant_id: Optional[str] = None,
**kwargs
):
"""
:keyword variants: This is a dictionary.
:paramtype variants: dict[str, ~flow.models.VariantNode]
:keyword default_variant_id:
:paramtype default_variant_id: str
"""
super(NodeVariant, self).__init__(**kwargs)
self.variants = variants
self.default_variant_id = default_variant_id
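# Editor's note: illustrative sketch (not generated). VariantNode is defined
# elsewhere in this module, so a pre-built instance is taken as a parameter;
# the variant id below is hypothetical.
def _example_node_variant(variant_node):  # pragma: no cover - illustrative sketch
    return NodeVariant(
        variants={"variant_0": variant_node},
        default_variant_id="variant_0",
    )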
class NoteBookTaskDto(msrest.serialization.Model):
"""NoteBookTaskDto.
:ivar notebook_path:
:vartype notebook_path: str
:ivar base_parameters: Dictionary of :code:`<string>`.
:vartype base_parameters: dict[str, str]
"""
_attribute_map = {
'notebook_path': {'key': 'notebook_path', 'type': 'str'},
'base_parameters': {'key': 'base_parameters', 'type': '{str}'},
}
def __init__(
self,
*,
notebook_path: Optional[str] = None,
base_parameters: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword notebook_path:
:paramtype notebook_path: str
:keyword base_parameters: Dictionary of :code:`<string>`.
:paramtype base_parameters: dict[str, str]
"""
super(NoteBookTaskDto, self).__init__(**kwargs)
self.notebook_path = notebook_path
self.base_parameters = base_parameters
class NotificationSetting(msrest.serialization.Model):
"""NotificationSetting.
:ivar emails:
:vartype emails: list[str]
:ivar email_on:
:vartype email_on: list[str or ~flow.models.EmailNotificationEnableType]
:ivar webhooks: Dictionary of :code:`<Webhook>`.
:vartype webhooks: dict[str, ~flow.models.Webhook]
"""
_attribute_map = {
'emails': {'key': 'emails', 'type': '[str]'},
'email_on': {'key': 'emailOn', 'type': '[str]'},
'webhooks': {'key': 'webhooks', 'type': '{Webhook}'},
}
def __init__(
self,
*,
emails: Optional[List[str]] = None,
email_on: Optional[List[Union[str, "EmailNotificationEnableType"]]] = None,
webhooks: Optional[Dict[str, "Webhook"]] = None,
**kwargs
):
"""
:keyword emails:
:paramtype emails: list[str]
:keyword email_on:
:paramtype email_on: list[str or ~flow.models.EmailNotificationEnableType]
:keyword webhooks: Dictionary of :code:`<Webhook>`.
:paramtype webhooks: dict[str, ~flow.models.Webhook]
"""
super(NotificationSetting, self).__init__(**kwargs)
self.emails = emails
self.email_on = email_on
self.webhooks = webhooks
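# Editor's note: illustrative sketch (not generated). email_on accepts
# EmailNotificationEnableType enum members or their string values; the values
# shown here are assumptions, not confirmed members of that enum.
def _example_notification_setting():  # pragma: no cover - illustrative sketch
    return NotificationSetting(
        emails=["[email protected]"],
        email_on=["JobCompleted", "JobFailed"],  # assumed enum string values
    )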
class ODataError(msrest.serialization.Model):
"""Represents OData v4 error object.
:ivar code: Gets or sets a language-independent, service-defined error code.
This code serves as a sub-status for the HTTP error code specified
in the response.
:vartype code: str
:ivar message: Gets or sets a human-readable, language-dependent representation of the error.
The ``Content-Language`` header MUST contain the language code from [RFC5646]
corresponding to the language in which the value for message is written.
:vartype message: str
:ivar target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:vartype target: str
:ivar details: Gets or sets additional details about the error.
:vartype details: list[~flow.models.ODataErrorDetail]
:ivar innererror: The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:vartype innererror: ~flow.models.ODataInnerError
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[ODataErrorDetail]'},
'innererror': {'key': 'innererror', 'type': 'ODataInnerError'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
target: Optional[str] = None,
details: Optional[List["ODataErrorDetail"]] = None,
innererror: Optional["ODataInnerError"] = None,
**kwargs
):
"""
:keyword code: Gets or sets a language-independent, service-defined error code.
This code serves as a sub-status for the HTTP error code specified
in the response.
:paramtype code: str
:keyword message: Gets or sets a human-readable, language-dependent representation of the
error.
The ``Content-Language`` header MUST contain the language code from [RFC5646]
corresponding to the language in which the value for message is written.
:paramtype message: str
:keyword target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:paramtype target: str
:keyword details: Gets or sets additional details about the error.
:paramtype details: list[~flow.models.ODataErrorDetail]
:keyword innererror: The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:paramtype innererror: ~flow.models.ODataInnerError
"""
super(ODataError, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
self.details = details
self.innererror = innererror
class ODataErrorDetail(msrest.serialization.Model):
"""Represents additional error details.
:ivar code: Gets or sets a language-independent, service-defined error code.
:vartype code: str
:ivar message: Gets or sets a human-readable, language-dependent representation of the error.
:vartype message: str
:ivar target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:vartype target: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
target: Optional[str] = None,
**kwargs
):
"""
:keyword code: Gets or sets a language-independent, service-defined error code.
:paramtype code: str
:keyword message: Gets or sets a human-readable, language-dependent representation of the
error.
:paramtype message: str
:keyword target: Gets or sets the target of the particular error
(for example, the name of the property in error).
:paramtype target: str
"""
super(ODataErrorDetail, self).__init__(**kwargs)
self.code = code
self.message = message
self.target = target
class ODataErrorResponse(msrest.serialization.Model):
"""Represents OData v4 compliant error response message.
:ivar error: Represents OData v4 error object.
:vartype error: ~flow.models.ODataError
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ODataError'},
}
def __init__(
self,
*,
error: Optional["ODataError"] = None,
**kwargs
):
"""
:keyword error: Represents OData v4 error object.
:paramtype error: ~flow.models.ODataError
"""
super(ODataErrorResponse, self).__init__(**kwargs)
self.error = error
class ODataInnerError(msrest.serialization.Model):
"""The contents of this object are service-defined.
Usually this object contains information that will help debug the service
and SHOULD only be used in development environments in order to guard
against potential security concerns around information disclosure.
:ivar client_request_id: Gets or sets the client-provided request ID.
:vartype client_request_id: str
:ivar service_request_id: Gets or sets the server-generated request ID.
:vartype service_request_id: str
:ivar trace: Gets or sets the exception stack trace.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENTS.
:vartype trace: str
:ivar context: Gets or sets additional context for the exception.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENTS.
:vartype context: str
"""
_attribute_map = {
'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
'service_request_id': {'key': 'serviceRequestId', 'type': 'str'},
'trace': {'key': 'trace', 'type': 'str'},
'context': {'key': 'context', 'type': 'str'},
}
def __init__(
self,
*,
client_request_id: Optional[str] = None,
service_request_id: Optional[str] = None,
trace: Optional[str] = None,
context: Optional[str] = None,
**kwargs
):
"""
:keyword client_request_id: Gets or sets the client-provided request ID.
:paramtype client_request_id: str
:keyword service_request_id: Gets or sets the server-generated request ID.
:paramtype service_request_id: str
:keyword trace: Gets or sets the exception stack trace.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENTS.
:paramtype trace: str
:keyword context: Gets or sets additional context for the exception.
DO NOT INCLUDE IT IN PRODUCTION ENVIRONMENTS.
:paramtype context: str
"""
super(ODataInnerError, self).__init__(**kwargs)
self.client_request_id = client_request_id
self.service_request_id = service_request_id
self.trace = trace
self.context = context
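# Editor's note: illustrative sketch (not generated) assembling the OData v4
# error envelope from the classes above; the codes and messages are made up.
def _example_odata_error_response():  # pragma: no cover - illustrative sketch
    detail = ODataErrorDetail(code="InvalidProperty", message="Bad value.", target="name")
    error = ODataError(
        code="BadArgument",
        message="The request is invalid.",
        details=[detail],
        # innererror is for development diagnostics only; see the warnings above.
        innererror=ODataInnerError(client_request_id="00000000-0000-0000-0000-000000000000"),
    )
    return ODataErrorResponse(error=error)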
class OutputData(msrest.serialization.Model):
"""OutputData.
:ivar output_location:
:vartype output_location: ~flow.models.ExecutionDataLocation
:ivar mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct".
:vartype mechanism: str or ~flow.models.OutputMechanism
:ivar additional_options:
:vartype additional_options: ~flow.models.OutputOptions
:ivar environment_variable_name:
:vartype environment_variable_name: str
"""
_attribute_map = {
'output_location': {'key': 'outputLocation', 'type': 'ExecutionDataLocation'},
'mechanism': {'key': 'mechanism', 'type': 'str'},
'additional_options': {'key': 'additionalOptions', 'type': 'OutputOptions'},
'environment_variable_name': {'key': 'environmentVariableName', 'type': 'str'},
}
def __init__(
self,
*,
output_location: Optional["ExecutionDataLocation"] = None,
mechanism: Optional[Union[str, "OutputMechanism"]] = None,
additional_options: Optional["OutputOptions"] = None,
environment_variable_name: Optional[str] = None,
**kwargs
):
"""
:keyword output_location:
:paramtype output_location: ~flow.models.ExecutionDataLocation
:keyword mechanism: Possible values include: "Upload", "Mount", "Hdfs", "Link", "Direct".
:paramtype mechanism: str or ~flow.models.OutputMechanism
:keyword additional_options:
:paramtype additional_options: ~flow.models.OutputOptions
:keyword environment_variable_name:
:paramtype environment_variable_name: str
"""
super(OutputData, self).__init__(**kwargs)
self.output_location = output_location
self.mechanism = mechanism
self.additional_options = additional_options
self.environment_variable_name = environment_variable_name
class OutputDataBinding(msrest.serialization.Model):
"""OutputDataBinding.
:ivar datastore_id:
:vartype datastore_id: str
:ivar path_on_datastore:
:vartype path_on_datastore: str
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar description:
:vartype description: str
:ivar uri:
:vartype uri: ~flow.models.MfeInternalUriReference
:ivar mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:vartype mode: str or ~flow.models.DataBindingMode
:ivar asset_uri:
:vartype asset_uri: str
:ivar is_asset_job_output:
:vartype is_asset_job_output: bool
:ivar job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
"MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:vartype job_output_type: str or ~flow.models.JobOutputType
:ivar asset_name:
:vartype asset_name: str
:ivar asset_version:
:vartype asset_version: str
:ivar auto_delete_setting:
:vartype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
_attribute_map = {
'datastore_id': {'key': 'datastoreId', 'type': 'str'},
'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'MfeInternalUriReference'},
'mode': {'key': 'mode', 'type': 'str'},
'asset_uri': {'key': 'assetUri', 'type': 'str'},
'is_asset_job_output': {'key': 'isAssetJobOutput', 'type': 'bool'},
'job_output_type': {'key': 'jobOutputType', 'type': 'str'},
'asset_name': {'key': 'assetName', 'type': 'str'},
'asset_version': {'key': 'assetVersion', 'type': 'str'},
'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'},
}
def __init__(
self,
*,
datastore_id: Optional[str] = None,
path_on_datastore: Optional[str] = None,
path_on_compute: Optional[str] = None,
description: Optional[str] = None,
uri: Optional["MfeInternalUriReference"] = None,
mode: Optional[Union[str, "DataBindingMode"]] = None,
asset_uri: Optional[str] = None,
is_asset_job_output: Optional[bool] = None,
job_output_type: Optional[Union[str, "JobOutputType"]] = None,
asset_name: Optional[str] = None,
asset_version: Optional[str] = None,
auto_delete_setting: Optional["AutoDeleteSetting"] = None,
**kwargs
):
"""
:keyword datastore_id:
:paramtype datastore_id: str
:keyword path_on_datastore:
:paramtype path_on_datastore: str
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword description:
:paramtype description: str
:keyword uri:
:paramtype uri: ~flow.models.MfeInternalUriReference
:keyword mode: Possible values include: "Mount", "Download", "Upload", "ReadOnlyMount",
"ReadWriteMount", "Direct", "EvalMount", "EvalDownload".
:paramtype mode: str or ~flow.models.DataBindingMode
:keyword asset_uri:
:paramtype asset_uri: str
:keyword is_asset_job_output:
:paramtype is_asset_job_output: bool
:keyword job_output_type: Possible values include: "Uri", "Dataset", "UriFile", "UriFolder",
"MLTable", "CustomModel", "MLFlowModel", "TritonModel".
:paramtype job_output_type: str or ~flow.models.JobOutputType
:keyword asset_name:
:paramtype asset_name: str
:keyword asset_version:
:paramtype asset_version: str
:keyword auto_delete_setting:
:paramtype auto_delete_setting: ~flow.models.AutoDeleteSetting
"""
super(OutputDataBinding, self).__init__(**kwargs)
self.datastore_id = datastore_id
self.path_on_datastore = path_on_datastore
self.path_on_compute = path_on_compute
self.description = description
self.uri = uri
self.mode = mode
self.asset_uri = asset_uri
self.is_asset_job_output = is_asset_job_output
self.job_output_type = job_output_type
self.asset_name = asset_name
self.asset_version = asset_version
self.auto_delete_setting = auto_delete_setting
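
# Usage sketch (hedged): binding a job output to a datastore path via mount.
# The datastore name and paths below are hypothetical; ``mode`` and
# ``job_output_type`` accept either the enum member or its string form, per
# the "Possible values include" lists in the docstring above:
#
#     binding = OutputDataBinding(
#         datastore_id="workspaceblobstore",   # hypothetical datastore name
#         path_on_datastore="outputs/run-001",
#         mode="Mount",
#         job_output_type="UriFolder",
#     )
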
class OutputDatasetLineage(msrest.serialization.Model):
"""OutputDatasetLineage.
:ivar identifier:
:vartype identifier: ~flow.models.DatasetIdentifier
:ivar output_type: Possible values include: "RunOutput", "Reference".
:vartype output_type: str or ~flow.models.DatasetOutputType
:ivar output_details:
:vartype output_details: ~flow.models.DatasetOutputDetails
"""
_attribute_map = {
'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
'output_type': {'key': 'outputType', 'type': 'str'},
'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
}
def __init__(
self,
*,
identifier: Optional["DatasetIdentifier"] = None,
output_type: Optional[Union[str, "DatasetOutputType"]] = None,
output_details: Optional["DatasetOutputDetails"] = None,
**kwargs
):
"""
:keyword identifier:
:paramtype identifier: ~flow.models.DatasetIdentifier
:keyword output_type: Possible values include: "RunOutput", "Reference".
:paramtype output_type: str or ~flow.models.DatasetOutputType
:keyword output_details:
:paramtype output_details: ~flow.models.DatasetOutputDetails
"""
super(OutputDatasetLineage, self).__init__(**kwargs)
self.identifier = identifier
self.output_type = output_type
self.output_details = output_details
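
# Usage sketch (hedged): lineage ties an output back to a dataset identifier.
# ``DatasetIdentifier`` is another all-optional generated model in this
# module, so a bare constructor call is valid; values are illustrative:
#
#     lineage = OutputDatasetLineage(
#         identifier=DatasetIdentifier(),
#         output_type="RunOutput",
#     )
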
class OutputDefinition(msrest.serialization.Model):
"""OutputDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: list[str or ~flow.models.ValueType]
:ivar description:
:vartype description: str
:ivar is_property:
:vartype is_property: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': '[str]'},
'description': {'key': 'description', 'type': 'str'},
'is_property': {'key': 'isProperty', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[List[Union[str, "ValueType"]]] = None,
description: Optional[str] = None,
is_property: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: list[str or ~flow.models.ValueType]
:keyword description:
:paramtype description: str
:keyword is_property:
:paramtype is_property: bool
"""
super(OutputDefinition, self).__init__(**kwargs)
self.name = name
self.type = type
self.description = description
self.is_property = is_property
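
# Usage sketch (hedged): declaring a named output. Entries in ``type`` mirror
# the ValueType enum (string forms are also accepted); the names here are
# illustrative only:
#
#     out_def = OutputDefinition(
#         name="answer",
#         type=["string"],   # list[str or ~flow.models.ValueType]
#         description="Final answer produced by the flow.",
#         is_property=False,
#     )
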
class OutputOptions(msrest.serialization.Model):
"""OutputOptions.
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar registration_options:
:vartype registration_options: ~flow.models.RegistrationOptions
:ivar upload_options:
:vartype upload_options: ~flow.models.UploadOptions
:ivar mount_options: Dictionary of :code:`<string>`.
:vartype mount_options: dict[str, str]
"""
_attribute_map = {
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'registration_options': {'key': 'registrationOptions', 'type': 'RegistrationOptions'},
'upload_options': {'key': 'uploadOptions', 'type': 'UploadOptions'},
'mount_options': {'key': 'mountOptions', 'type': '{str}'},
}
def __init__(
self,
*,
path_on_compute: Optional[str] = None,
registration_options: Optional["RegistrationOptions"] = None,
upload_options: Optional["UploadOptions"] = None,
mount_options: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword registration_options:
:paramtype registration_options: ~flow.models.RegistrationOptions
:keyword upload_options:
:paramtype upload_options: ~flow.models.UploadOptions
:keyword mount_options: Dictionary of :code:`<string>`.
:paramtype mount_options: dict[str, str]
"""
super(OutputOptions, self).__init__(**kwargs)
self.path_on_compute = path_on_compute
self.registration_options = registration_options
self.upload_options = upload_options
self.mount_options = mount_options
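
# Usage sketch (hedged): output options pairing a compute-local path with
# free-form mount options; both values are illustrative, and ``mount_options``
# is a plain str-to-str dictionary as documented above:
#
#     opts = OutputOptions(
#         path_on_compute="/mnt/outputs",
#         mount_options={"readonly": "false"},
#     )
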
class OutputSetting(msrest.serialization.Model):
"""OutputSetting.
:ivar name:
:vartype name: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_name_parameter_assignment:
:vartype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar data_store_mode_parameter_assignment:
:vartype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar path_on_compute_parameter_assignment:
:vartype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar web_service_port:
:vartype web_service_port: str
:ivar dataset_registration:
:vartype dataset_registration: ~flow.models.DatasetRegistration
:ivar dataset_output_options:
:vartype dataset_output_options: ~flow.models.DatasetOutputOptions
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AssetOutputSettings
:ivar parameter_name:
:vartype parameter_name: str
:ivar asset_output_settings_parameter_name:
:vartype asset_output_settings_parameter_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_name_parameter_assignment': {'key': 'DataStoreNameParameterAssignment', 'type': 'ParameterAssignment'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'data_store_mode_parameter_assignment': {'key': 'DataStoreModeParameterAssignment', 'type': 'ParameterAssignment'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'path_on_compute_parameter_assignment': {'key': 'PathOnComputeParameterAssignment', 'type': 'ParameterAssignment'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
'dataset_registration': {'key': 'datasetRegistration', 'type': 'DatasetRegistration'},
'dataset_output_options': {'key': 'datasetOutputOptions', 'type': 'DatasetOutputOptions'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'asset_output_settings_parameter_name': {'key': 'AssetOutputSettingsParameterName', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
data_store_name: Optional[str] = None,
data_store_name_parameter_assignment: Optional["ParameterAssignment"] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
data_store_mode_parameter_assignment: Optional["ParameterAssignment"] = None,
path_on_compute: Optional[str] = None,
path_on_compute_parameter_assignment: Optional["ParameterAssignment"] = None,
overwrite: Optional[bool] = None,
data_reference_name: Optional[str] = None,
web_service_port: Optional[str] = None,
dataset_registration: Optional["DatasetRegistration"] = None,
dataset_output_options: Optional["DatasetOutputOptions"] = None,
asset_output_settings: Optional["AssetOutputSettings"] = None,
parameter_name: Optional[str] = None,
asset_output_settings_parameter_name: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_name_parameter_assignment:
:paramtype data_store_name_parameter_assignment: ~flow.models.ParameterAssignment
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword data_store_mode_parameter_assignment:
:paramtype data_store_mode_parameter_assignment: ~flow.models.ParameterAssignment
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword path_on_compute_parameter_assignment:
:paramtype path_on_compute_parameter_assignment: ~flow.models.ParameterAssignment
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword web_service_port:
:paramtype web_service_port: str
:keyword dataset_registration:
:paramtype dataset_registration: ~flow.models.DatasetRegistration
:keyword dataset_output_options:
:paramtype dataset_output_options: ~flow.models.DatasetOutputOptions
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AssetOutputSettings
:keyword parameter_name:
:paramtype parameter_name: str
:keyword asset_output_settings_parameter_name:
:paramtype asset_output_settings_parameter_name: str
"""
super(OutputSetting, self).__init__(**kwargs)
self.name = name
self.data_store_name = data_store_name
self.data_store_name_parameter_assignment = data_store_name_parameter_assignment
self.data_store_mode = data_store_mode
self.data_store_mode_parameter_assignment = data_store_mode_parameter_assignment
self.path_on_compute = path_on_compute
self.path_on_compute_parameter_assignment = path_on_compute_parameter_assignment
self.overwrite = overwrite
self.data_reference_name = data_reference_name
self.web_service_port = web_service_port
self.dataset_registration = dataset_registration
self.dataset_output_options = dataset_output_options
self.asset_output_settings = asset_output_settings
self.parameter_name = parameter_name
self.asset_output_settings_parameter_name = asset_output_settings_parameter_name
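
# Usage sketch (hedged): routing one named output to a datastore in upload
# mode. ``data_store_mode`` takes an AEVADataStoreMode value or its string
# form; every name below is hypothetical:
#
#     setting = OutputSetting(
#         name="scored_data",
#         data_store_name="workspaceblobstore",
#         data_store_mode="Upload",
#         path_on_compute="outputs/scored",
#         overwrite=True,
#     )
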
class OutputSettingSpec(msrest.serialization.Model):
"""OutputSettingSpec.
:ivar supported_data_store_modes:
:vartype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
:ivar default_asset_output_path:
:vartype default_asset_output_path: str
"""
_attribute_map = {
'supported_data_store_modes': {'key': 'supportedDataStoreModes', 'type': '[str]'},
'default_asset_output_path': {'key': 'defaultAssetOutputPath', 'type': 'str'},
}
def __init__(
self,
*,
supported_data_store_modes: Optional[List[Union[str, "AEVADataStoreMode"]]] = None,
default_asset_output_path: Optional[str] = None,
**kwargs
):
"""
:keyword supported_data_store_modes:
:paramtype supported_data_store_modes: list[str or ~flow.models.AEVADataStoreMode]
:keyword default_asset_output_path:
:paramtype default_asset_output_path: str
"""
super(OutputSettingSpec, self).__init__(**kwargs)
self.supported_data_store_modes = supported_data_store_modes
self.default_asset_output_path = default_asset_output_path
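
# Usage sketch (hedged): a spec restricting which datastore modes an output
# supports; the mode strings mirror the AEVADataStoreMode enum and the path
# is illustrative:
#
#     spec = OutputSettingSpec(
#         supported_data_store_modes=["Mount", "Upload"],
#         default_asset_output_path="outputs/",
#     )
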
class PaginatedDataInfoList(msrest.serialization.Model):
"""A paginated list of DataInfos.
:ivar value: An array of objects of type DataInfo.
:vartype value: list[~flow.models.DataInfo]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[DataInfo]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["DataInfo"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type DataInfo.
:paramtype value: list[~flow.models.DataInfo]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedDataInfoList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
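
# Usage sketch (hedged): this model and the sibling Paginated*List models that
# follow all share the same shape (``value``, ``continuation_token``,
# ``next_link``), so one consumption pattern covers them. ``fetch_page`` is a
# hypothetical callable standing in for whichever service operation returns a
# PaginatedDataInfoList:
#
#     def iter_data_infos(fetch_page):
#         token = None
#         while True:
#             page = fetch_page(continuation_token=token)
#             for info in page.value or []:
#                 yield info
#             token = page.continuation_token
#             if not token:   # a null token means there are no further pages
#                 break
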
class PaginatedModelDtoList(msrest.serialization.Model):
"""A paginated list of ModelDtos.
:ivar value: An array of objects of type ModelDto.
:vartype value: list[~flow.models.ModelDto]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ModelDto]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ModelDto"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type ModelDto.
:paramtype value: list[~flow.models.ModelDto]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedModelDtoList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
class PaginatedModuleDtoList(msrest.serialization.Model):
"""A paginated list of ModuleDtos.
:ivar value: An array of objects of type ModuleDto.
:vartype value: list[~flow.models.ModuleDto]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ModuleDto]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ModuleDto"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type ModuleDto.
:paramtype value: list[~flow.models.ModuleDto]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedModuleDtoList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
class PaginatedPipelineDraftSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineDraftSummarys.
:ivar value: An array of objects of type PipelineDraftSummary.
:vartype value: list[~flow.models.PipelineDraftSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineDraftSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PipelineDraftSummary"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineDraftSummary.
:paramtype value: list[~flow.models.PipelineDraftSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineDraftSummaryList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
class PaginatedPipelineEndpointSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineEndpointSummarys.
:ivar value: An array of objects of type PipelineEndpointSummary.
:vartype value: list[~flow.models.PipelineEndpointSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineEndpointSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PipelineEndpointSummary"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineEndpointSummary.
:paramtype value: list[~flow.models.PipelineEndpointSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineEndpointSummaryList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
class PaginatedPipelineRunSummaryList(msrest.serialization.Model):
"""A paginated list of PipelineRunSummarys.
:ivar value: An array of objects of type PipelineRunSummary.
:vartype value: list[~flow.models.PipelineRunSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PipelineRunSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PipelineRunSummary"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type PipelineRunSummary.
:paramtype value: list[~flow.models.PipelineRunSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPipelineRunSummaryList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
class PaginatedPublishedPipelineSummaryList(msrest.serialization.Model):
"""A paginated list of PublishedPipelineSummarys.
:ivar value: An array of objects of type PublishedPipelineSummary.
:vartype value: list[~flow.models.PublishedPipelineSummary]
:ivar continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:vartype continuation_token: str
:ivar next_link: The link to the next page constructed using the continuationToken. If null,
there are no additional pages.
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublishedPipelineSummary]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["PublishedPipelineSummary"]] = None,
continuation_token: Optional[str] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value: An array of objects of type PublishedPipelineSummary.
:paramtype value: list[~flow.models.PublishedPipelineSummary]
:keyword continuation_token: The token used in retrieving the next page. If null, there are no
additional pages.
:paramtype continuation_token: str
:keyword next_link: The link to the next page constructed using the continuationToken. If
null, there are no additional pages.
:paramtype next_link: str
"""
super(PaginatedPublishedPipelineSummaryList, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.next_link = next_link
class ParallelForControlFlowInfo(msrest.serialization.Model):
"""ParallelForControlFlowInfo.
:ivar parallel_for_items_input:
:vartype parallel_for_items_input: ~flow.models.ParameterAssignment
"""
_attribute_map = {
'parallel_for_items_input': {'key': 'parallelForItemsInput', 'type': 'ParameterAssignment'},
}
def __init__(
self,
*,
parallel_for_items_input: Optional["ParameterAssignment"] = None,
**kwargs
):
"""
:keyword parallel_for_items_input:
:paramtype parallel_for_items_input: ~flow.models.ParameterAssignment
"""
super(ParallelForControlFlowInfo, self).__init__(**kwargs)
self.parallel_for_items_input = parallel_for_items_input
class ParallelTaskConfiguration(msrest.serialization.Model):
"""ParallelTaskConfiguration.
:ivar max_retries_per_worker:
:vartype max_retries_per_worker: int
:ivar worker_count_per_node:
:vartype worker_count_per_node: int
:ivar terminal_exit_codes:
:vartype terminal_exit_codes: list[int]
:ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str]
"""
_attribute_map = {
'max_retries_per_worker': {'key': 'maxRetriesPerWorker', 'type': 'int'},
'worker_count_per_node': {'key': 'workerCountPerNode', 'type': 'int'},
'terminal_exit_codes': {'key': 'terminalExitCodes', 'type': '[int]'},
'configuration': {'key': 'configuration', 'type': '{str}'},
}
def __init__(
self,
*,
max_retries_per_worker: Optional[int] = None,
worker_count_per_node: Optional[int] = None,
terminal_exit_codes: Optional[List[int]] = None,
configuration: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword max_retries_per_worker:
:paramtype max_retries_per_worker: int
:keyword worker_count_per_node:
:paramtype worker_count_per_node: int
:keyword terminal_exit_codes:
:paramtype terminal_exit_codes: list[int]
:keyword configuration: Dictionary of :code:`<string>`.
:paramtype configuration: dict[str, str]
"""
super(ParallelTaskConfiguration, self).__init__(**kwargs)
self.max_retries_per_worker = max_retries_per_worker
self.worker_count_per_node = worker_count_per_node
self.terminal_exit_codes = terminal_exit_codes
self.configuration = configuration
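
# Usage sketch (hedged): a parallel task tuned for two workers per node with a
# bounded retry budget; exit code 1 is treated as terminal. All values are
# illustrative:
#
#     parallel_cfg = ParallelTaskConfiguration(
#         max_retries_per_worker=3,
#         worker_count_per_node=2,
#         terminal_exit_codes=[1],
#         configuration={"logging_level": "INFO"},
#     )
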
class Parameter(msrest.serialization.Model):
"""Parameter.
:ivar name:
:vartype name: str
:ivar documentation:
:vartype documentation: str
:ivar default_value:
:vartype default_value: str
:ivar is_optional:
:vartype is_optional: bool
:ivar min_max_rules:
:vartype min_max_rules: list[~flow.models.MinMaxParameterRule]
:ivar enum_rules:
:vartype enum_rules: list[~flow.models.EnumParameterRule]
:ivar type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype type: str or ~flow.models.ParameterType
:ivar label:
:vartype label: str
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
:ivar ui_hint:
:vartype ui_hint: ~flow.models.UIParameterHint
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'documentation': {'key': 'documentation', 'type': 'str'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'min_max_rules': {'key': 'minMaxRules', 'type': '[MinMaxParameterRule]'},
'enum_rules': {'key': 'enumRules', 'type': '[EnumParameterRule]'},
'type': {'key': 'type', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
}
def __init__(
self,
*,
name: Optional[str] = None,
documentation: Optional[str] = None,
default_value: Optional[str] = None,
is_optional: Optional[bool] = None,
min_max_rules: Optional[List["MinMaxParameterRule"]] = None,
enum_rules: Optional[List["EnumParameterRule"]] = None,
type: Optional[Union[str, "ParameterType"]] = None,
label: Optional[str] = None,
group_names: Optional[List[str]] = None,
argument_name: Optional[str] = None,
ui_hint: Optional["UIParameterHint"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword documentation:
:paramtype documentation: str
:keyword default_value:
:paramtype default_value: str
:keyword is_optional:
:paramtype is_optional: bool
:keyword min_max_rules:
:paramtype min_max_rules: list[~flow.models.MinMaxParameterRule]
:keyword enum_rules:
:paramtype enum_rules: list[~flow.models.EnumParameterRule]
:keyword type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:paramtype type: str or ~flow.models.ParameterType
:keyword label:
:paramtype label: str
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.UIParameterHint
"""
super(Parameter, self).__init__(**kwargs)
self.name = name
self.documentation = documentation
self.default_value = default_value
self.is_optional = is_optional
self.min_max_rules = min_max_rules
self.enum_rules = enum_rules
self.type = type
self.label = label
self.group_names = group_names
self.argument_name = argument_name
self.ui_hint = ui_hint
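
# Usage sketch (hedged): a typed, optional parameter. ``type`` accepts the
# ParameterType enum or its string form ("Int", "Double", "Bool", "String",
# "Undefined"); the rule lists, omitted here, take MinMaxParameterRule /
# EnumParameterRule instances defined elsewhere in this module:
#
#     param = Parameter(
#         name="learning_rate",
#         type="Double",
#         default_value="0.01",
#         is_optional=True,
#         label="Learning rate",
#     )
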
class ParameterAssignment(msrest.serialization.Model):
"""ParameterAssignment.
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.ParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.LegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
value_type: Optional[Union[str, "ParameterValueType"]] = None,
assignments_to_concatenate: Optional[List["ParameterAssignment"]] = None,
data_path_assignment: Optional["LegacyDataPath"] = None,
data_set_definition_value_assignment: Optional["DataSetDefinitionValue"] = None,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.ParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.LegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(ParameterAssignment, self).__init__(**kwargs)
self.value_type = value_type
self.assignments_to_concatenate = assignments_to_concatenate
self.data_path_assignment = data_path_assignment
self.data_set_definition_value_assignment = data_set_definition_value_assignment
self.name = name
self.value = value
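
# Usage sketch (hedged): ParameterAssignment is recursive through
# ``assignments_to_concatenate``, so a value can be built from literal
# fragments and graph parameters. A sketch concatenating a literal prefix with
# a hypothetical graph parameter named "run_name":
#
#     assignment = ParameterAssignment(
#         value_type="Concatenate",
#         assignments_to_concatenate=[
#             ParameterAssignment(value_type="Literal", value="outputs/"),
#             ParameterAssignment(value_type="GraphParameterName", value="run_name"),
#         ],
#     )
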
class ParameterDefinition(msrest.serialization.Model):
"""ParameterDefinition.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
:ivar value:
:vartype value: str
:ivar is_optional:
:vartype is_optional: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[str] = None,
value: Optional[str] = None,
is_optional: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
:keyword value:
:paramtype value: str
:keyword is_optional:
:paramtype is_optional: bool
"""
super(ParameterDefinition, self).__init__(**kwargs)
self.name = name
self.type = type
self.value = value
self.is_optional = is_optional
class PatchFlowRequest(msrest.serialization.Model):
"""PatchFlowRequest.
:ivar flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
"ExportFlowToFile".
:vartype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
"""
_attribute_map = {
'flow_patch_operation_type': {'key': 'flowPatchOperationType', 'type': 'str'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
}
def __init__(
self,
*,
flow_patch_operation_type: Optional[Union[str, "FlowPatchOperationType"]] = None,
flow_definition_file_path: Optional[str] = None,
**kwargs
):
"""
:keyword flow_patch_operation_type: Possible values include: "ArchiveFlow", "RestoreFlow",
"ExportFlowToFile".
:paramtype flow_patch_operation_type: str or ~flow.models.FlowPatchOperationType
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
"""
super(PatchFlowRequest, self).__init__(**kwargs)
self.flow_patch_operation_type = flow_patch_operation_type
self.flow_definition_file_path = flow_definition_file_path
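
# Usage sketch (hedged): archiving a flow is expressed as a patch operation;
# ``flow_definition_file_path`` only applies to the "ExportFlowToFile"
# operation type, so it can be omitted here:
#
#     patch = PatchFlowRequest(flow_patch_operation_type="ArchiveFlow")
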
class Pipeline(msrest.serialization.Model):
"""Pipeline.
:ivar run_id:
:vartype run_id: str
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar default_datastore_name:
:vartype default_datastore_name: str
:ivar component_jobs: This is a dictionary.
:vartype component_jobs: dict[str, ~flow.models.ComponentJob]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.PipelineInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.PipelineOutput]
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'default_datastore_name': {'key': 'defaultDatastoreName', 'type': 'str'},
'component_jobs': {'key': 'componentJobs', 'type': '{ComponentJob}'},
'inputs': {'key': 'inputs', 'type': '{PipelineInput}'},
'outputs': {'key': 'outputs', 'type': '{PipelineOutput}'},
}
def __init__(
self,
*,
run_id: Optional[str] = None,
continue_run_on_step_failure: Optional[bool] = None,
default_datastore_name: Optional[str] = None,
component_jobs: Optional[Dict[str, "ComponentJob"]] = None,
inputs: Optional[Dict[str, "PipelineInput"]] = None,
outputs: Optional[Dict[str, "PipelineOutput"]] = None,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword default_datastore_name:
:paramtype default_datastore_name: str
:keyword component_jobs: This is a dictionary.
:paramtype component_jobs: dict[str, ~flow.models.ComponentJob]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.PipelineInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.PipelineOutput]
"""
super(Pipeline, self).__init__(**kwargs)
self.run_id = run_id
self.continue_run_on_step_failure = continue_run_on_step_failure
self.default_datastore_name = default_datastore_name
self.component_jobs = component_jobs
self.inputs = inputs
self.outputs = outputs
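
# Usage sketch (hedged): a pipeline's component jobs, inputs, and outputs are
# keyed dictionaries. ComponentJob, PipelineInput, and PipelineOutput are
# other all-optional models in this module; the keys are illustrative
# node/port names:
#
#     pipeline = Pipeline(
#         run_id="run-001",
#         continue_run_on_step_failure=False,
#         component_jobs={"train": ComponentJob()},
#         inputs={"training_data": PipelineInput()},
#         outputs={"model": PipelineOutput()},
#     )
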
class PipelineDraft(msrest.serialization.Model):
"""PipelineDraft.
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar source_pipeline_run_id:
:vartype source_pipeline_run_id: str
:ivar latest_pipeline_run_id:
:vartype latest_pipeline_run_id: str
:ivar latest_run_experiment_name:
:vartype latest_run_experiment_name: str
:ivar latest_run_experiment_id:
:vartype latest_run_experiment_id: str
:ivar is_latest_run_experiment_archived:
:vartype is_latest_run_experiment_archived: bool
:ivar status:
:vartype status: ~flow.models.PipelineStatus
:ivar graph_detail:
:vartype graph_detail: ~flow.models.PipelineRunGraphDetail
:ivar real_time_endpoint_info:
:vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:ivar linked_pipelines_info:
:vartype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
:ivar nodes_in_draft:
:vartype nodes_in_draft: list[str]
:ivar studio_migration_info:
:vartype studio_migration_info: ~flow.models.StudioMigrationInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_run_setting_parameters:
:vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar continue_run_on_failed_optional_input:
:vartype continue_run_on_failed_optional_input: bool
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar pipeline_timeout:
:vartype pipeline_timeout: int
:ivar identity_config:
:vartype identity_config: ~flow.models.IdentitySetting
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'source_pipeline_run_id': {'key': 'sourcePipelineRunId', 'type': 'str'},
'latest_pipeline_run_id': {'key': 'latestPipelineRunId', 'type': 'str'},
'latest_run_experiment_name': {'key': 'latestRunExperimentName', 'type': 'str'},
'latest_run_experiment_id': {'key': 'latestRunExperimentId', 'type': 'str'},
'is_latest_run_experiment_archived': {'key': 'isLatestRunExperimentArchived', 'type': 'bool'},
'status': {'key': 'status', 'type': 'PipelineStatus'},
'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
'linked_pipelines_info': {'key': 'linkedPipelinesInfo', 'type': '[LinkedPipelineInfo]'},
'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
'studio_migration_info': {'key': 'studioMigrationInfo', 'type': 'StudioMigrationInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'pipeline_timeout': {'key': 'pipelineTimeout', 'type': 'int'},
'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
graph_draft_id: Optional[str] = None,
source_pipeline_run_id: Optional[str] = None,
latest_pipeline_run_id: Optional[str] = None,
latest_run_experiment_name: Optional[str] = None,
latest_run_experiment_id: Optional[str] = None,
is_latest_run_experiment_archived: Optional[bool] = None,
status: Optional["PipelineStatus"] = None,
graph_detail: Optional["PipelineRunGraphDetail"] = None,
real_time_endpoint_info: Optional["RealTimeEndpointInfo"] = None,
linked_pipelines_info: Optional[List["LinkedPipelineInfo"]] = None,
nodes_in_draft: Optional[List[str]] = None,
studio_migration_info: Optional["StudioMigrationInfo"] = None,
flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
pipeline_run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
continue_run_on_step_failure: Optional[bool] = None,
continue_run_on_failed_optional_input: Optional[bool] = None,
default_compute: Optional["ComputeSetting"] = None,
default_datastore: Optional["DatastoreSetting"] = None,
default_cloud_priority: Optional["CloudPrioritySetting"] = None,
enforce_rerun: Optional[bool] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
pipeline_timeout: Optional[int] = None,
identity_config: Optional["IdentitySetting"] = None,
graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
name: Optional[str] = None,
last_edited_by: Optional[str] = None,
created_by: Optional[str] = None,
description: Optional[str] = None,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword source_pipeline_run_id:
:paramtype source_pipeline_run_id: str
:keyword latest_pipeline_run_id:
:paramtype latest_pipeline_run_id: str
:keyword latest_run_experiment_name:
:paramtype latest_run_experiment_name: str
:keyword latest_run_experiment_id:
:paramtype latest_run_experiment_id: str
:keyword is_latest_run_experiment_archived:
:paramtype is_latest_run_experiment_archived: bool
:keyword status:
:paramtype status: ~flow.models.PipelineStatus
:keyword graph_detail:
:paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
:keyword real_time_endpoint_info:
:paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:keyword linked_pipelines_info:
:paramtype linked_pipelines_info: list[~flow.models.LinkedPipelineInfo]
:keyword nodes_in_draft:
:paramtype nodes_in_draft: list[str]
:keyword studio_migration_info:
:paramtype studio_migration_info: ~flow.models.StudioMigrationInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_run_setting_parameters:
:paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword continue_run_on_failed_optional_input:
:paramtype continue_run_on_failed_optional_input: bool
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword pipeline_timeout:
:paramtype pipeline_timeout: int
:keyword identity_config:
:paramtype identity_config: ~flow.models.IdentitySetting
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineDraft, self).__init__(**kwargs)
self.graph_draft_id = graph_draft_id
self.source_pipeline_run_id = source_pipeline_run_id
self.latest_pipeline_run_id = latest_pipeline_run_id
self.latest_run_experiment_name = latest_run_experiment_name
self.latest_run_experiment_id = latest_run_experiment_id
self.is_latest_run_experiment_archived = is_latest_run_experiment_archived
self.status = status
self.graph_detail = graph_detail
self.real_time_endpoint_info = real_time_endpoint_info
self.linked_pipelines_info = linked_pipelines_info
self.nodes_in_draft = nodes_in_draft
self.studio_migration_info = studio_migration_info
self.flattened_sub_graphs = flattened_sub_graphs
self.pipeline_run_setting_parameters = pipeline_run_setting_parameters
self.pipeline_run_settings = pipeline_run_settings
self.continue_run_on_step_failure = continue_run_on_step_failure
self.continue_run_on_failed_optional_input = continue_run_on_failed_optional_input
self.default_compute = default_compute
self.default_datastore = default_datastore
self.default_cloud_priority = default_cloud_priority
self.enforce_rerun = enforce_rerun
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
self.pipeline_timeout = pipeline_timeout
self.identity_config = identity_config
self.graph_components_mode = graph_components_mode
self.name = name
self.last_edited_by = last_edited_by
self.created_by = created_by
self.description = description
self.pipeline_type = pipeline_type
self.pipeline_draft_mode = pipeline_draft_mode
self.tags = tags
self.properties = properties
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
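
# Usage sketch (hedged): PipelineDraft carries many fields, but all are
# optional, so a minimal draft only needs identifying metadata. Everything
# below is illustrative:
#
#     draft = PipelineDraft(
#         name="my-draft",
#         description="Scratch pipeline for experimentation.",
#         pipeline_type="TrainingPipeline",
#         pipeline_draft_mode="Normal",
#         tags={"team": "ml-platform"},
#     )
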
class PipelineDraftStepDetails(msrest.serialization.Model):
"""PipelineDraftStepDetails.
:ivar run_id:
:vartype run_id: str
:ivar target:
:vartype target: str
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_reused:
:vartype is_reused: bool
:ivar reused_run_id:
:vartype reused_run_id: str
:ivar reused_pipeline_run_id:
:vartype reused_pipeline_run_id: str
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar output_log:
:vartype output_log: str
:ivar run_configuration:
:vartype run_configuration: ~flow.models.RunConfiguration
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar port_outputs: This is a dictionary.
:vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'reused_run_id': {'key': 'reusedRunId', 'type': 'str'},
'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
'logs': {'key': 'logs', 'type': '{str}'},
'output_log': {'key': 'outputLog', 'type': 'str'},
'run_configuration': {'key': 'runConfiguration', 'type': 'RunConfiguration'},
'outputs': {'key': 'outputs', 'type': '{str}'},
'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
}
def __init__(
self,
*,
run_id: Optional[str] = None,
target: Optional[str] = None,
status: Optional[Union[str, "RunStatus"]] = None,
status_detail: Optional[str] = None,
parent_run_id: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
is_reused: Optional[bool] = None,
reused_run_id: Optional[str] = None,
reused_pipeline_run_id: Optional[str] = None,
logs: Optional[Dict[str, str]] = None,
output_log: Optional[str] = None,
run_configuration: Optional["RunConfiguration"] = None,
outputs: Optional[Dict[str, str]] = None,
port_outputs: Optional[Dict[str, "PortOutputInfo"]] = None,
is_experiment_archived: Optional[bool] = None,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword target:
:paramtype target: str
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword is_reused:
:paramtype is_reused: bool
:keyword reused_run_id:
:paramtype reused_run_id: str
:keyword reused_pipeline_run_id:
:paramtype reused_pipeline_run_id: str
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword output_log:
:paramtype output_log: str
:keyword run_configuration:
:paramtype run_configuration: ~flow.models.RunConfiguration
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword port_outputs: This is a dictionary.
:paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
"""
super(PipelineDraftStepDetails, self).__init__(**kwargs)
self.run_id = run_id
self.target = target
self.status = status
self.status_detail = status_detail
self.parent_run_id = parent_run_id
self.start_time = start_time
self.end_time = end_time
self.is_reused = is_reused
self.reused_run_id = reused_run_id
self.reused_pipeline_run_id = reused_pipeline_run_id
self.logs = logs
self.output_log = output_log
self.run_configuration = run_configuration
self.outputs = outputs
self.port_outputs = port_outputs
self.is_experiment_archived = is_experiment_archived
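
# Usage sketch (hedged): step details surface reuse information alongside run
# status, so a quick check for a reused, completed step needs only attribute
# access ("Completed" is one of the documented RunStatus values):
#
#     def is_reused_success(step: "PipelineDraftStepDetails") -> bool:
#         return bool(step.is_reused) and step.status == "Completed"
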
class PipelineDraftSummary(msrest.serialization.Model):
"""PipelineDraftSummary.
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
name: Optional[str] = None,
last_edited_by: Optional[str] = None,
created_by: Optional[str] = None,
description: Optional[str] = None,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineDraftSummary, self).__init__(**kwargs)
self.name = name
self.last_edited_by = last_edited_by
self.created_by = created_by
self.description = description
self.pipeline_type = pipeline_type
self.pipeline_draft_mode = pipeline_draft_mode
self.tags = tags
self.properties = properties
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
class PipelineEndpoint(msrest.serialization.Model):
"""PipelineEndpoint.
:ivar default_version:
:vartype default_version: str
:ivar default_pipeline_id:
:vartype default_pipeline_id: str
:ivar default_graph_id:
:vartype default_graph_id: str
:ivar rest_endpoint:
:vartype rest_endpoint: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar published_by:
:vartype published_by: str
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar default_pipeline_name:
:vartype default_pipeline_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar updated_by:
:vartype updated_by: str
:ivar swagger_url:
:vartype swagger_url: str
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'default_version': {'key': 'defaultVersion', 'type': 'str'},
'default_pipeline_id': {'key': 'defaultPipelineId', 'type': 'str'},
'default_graph_id': {'key': 'defaultGraphId', 'type': 'str'},
'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'default_pipeline_name': {'key': 'defaultPipelineName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
default_version: Optional[str] = None,
default_pipeline_id: Optional[str] = None,
default_graph_id: Optional[str] = None,
rest_endpoint: Optional[str] = None,
published_date: Optional[datetime.datetime] = None,
published_by: Optional[str] = None,
parameters: Optional[Dict[str, str]] = None,
data_set_definition_value_assignment: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
default_pipeline_name: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
updated_by: Optional[str] = None,
swagger_url: Optional[str] = None,
last_run_time: Optional[datetime.datetime] = None,
last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
tags: Optional[Dict[str, str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword default_version:
:paramtype default_version: str
:keyword default_pipeline_id:
:paramtype default_pipeline_id: str
:keyword default_graph_id:
:paramtype default_graph_id: str
:keyword rest_endpoint:
:paramtype rest_endpoint: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword published_by:
:paramtype published_by: str
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword default_pipeline_name:
:paramtype default_pipeline_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword updated_by:
:paramtype updated_by: str
:keyword swagger_url:
:paramtype swagger_url: str
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineEndpoint, self).__init__(**kwargs)
self.default_version = default_version
self.default_pipeline_id = default_pipeline_id
self.default_graph_id = default_graph_id
self.rest_endpoint = rest_endpoint
self.published_date = published_date
self.published_by = published_by
self.parameters = parameters
self.data_set_definition_value_assignment = data_set_definition_value_assignment
self.default_pipeline_name = default_pipeline_name
self.name = name
self.description = description
self.updated_by = updated_by
self.swagger_url = swagger_url
self.last_run_time = last_run_time
self.last_run_status = last_run_status
self.tags = tags
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
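
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# Building a PipelineEndpoint and serializing it with msrest's `serialize()`,
# which emits the camelCase wire keys declared in `_attribute_map` and renders
# datetimes as ISO-8601 strings. Name, version, date, and tags are hypothetical.
def _example_serialize_pipeline_endpoint():
    endpoint = PipelineEndpoint(
        name="demo-endpoint",  # hypothetical
        default_version="1",
        published_date=datetime.datetime(2024, 1, 1),
        tags={"team": "ml"},
    )
    body = endpoint.serialize()
    assert body["defaultVersion"] == "1"  # camelCase key from _attribute_map
    return body
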
class PipelineEndpointSummary(msrest.serialization.Model):
"""PipelineEndpointSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar updated_by:
:vartype updated_by: str
:ivar swagger_url:
:vartype swagger_url: str
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
'swagger_url': {'key': 'swaggerUrl', 'type': 'str'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
updated_by: Optional[str] = None,
swagger_url: Optional[str] = None,
last_run_time: Optional[datetime.datetime] = None,
last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
tags: Optional[Dict[str, str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword updated_by:
:paramtype updated_by: str
:keyword swagger_url:
:paramtype swagger_url: str
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineEndpointSummary, self).__init__(**kwargs)
self.name = name
self.description = description
self.updated_by = updated_by
self.swagger_url = swagger_url
self.last_run_time = last_run_time
self.last_run_status = last_run_status
self.tags = tags
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
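
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# msrest models expose two dict views: `serialize()` uses the REST (camelCase)
# keys, while `as_dict()` defaults to the Python attribute (snake_case) names.
# The name and URL below are hypothetical placeholders.
def _example_pipeline_endpoint_summary_views():
    summary = PipelineEndpointSummary(
        name="demo",  # hypothetical
        swagger_url="https://example.invalid/swagger.json",  # hypothetical
    )
    wire = summary.serialize()  # {'name': 'demo', 'swaggerUrl': ...}
    local = summary.as_dict()   # {'name': 'demo', 'swagger_url': ...}
    return wire, local
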
class PipelineGraph(msrest.serialization.Model):
"""PipelineGraph.
:ivar graph_module_dtos:
:vartype graph_module_dtos: list[~flow.models.ModuleDto]
:ivar graph_data_sources:
:vartype graph_data_sources: list[~flow.models.DataInfo]
:ivar graphs: This is a dictionary.
:vartype graphs: dict[str, ~flow.models.PipelineGraph]
:ivar graph_drafts: This is a dictionary.
:vartype graph_drafts: dict[str, ~flow.models.PipelineGraph]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar referenced_node_id:
:vartype referenced_node_id: str
:ivar pipeline_run_setting_parameters:
:vartype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar real_time_endpoint_info:
:vartype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:ivar node_telemetry_meta_infos:
:vartype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar module_nodes:
:vartype module_nodes: list[~flow.models.GraphModuleNode]
:ivar dataset_nodes:
:vartype dataset_nodes: list[~flow.models.GraphDatasetNode]
:ivar sub_graph_nodes:
:vartype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:ivar control_reference_nodes:
:vartype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:ivar control_nodes:
:vartype control_nodes: list[~flow.models.GraphControlNode]
:ivar edges:
:vartype edges: list[~flow.models.GraphEdge]
:ivar entity_interface:
:vartype entity_interface: ~flow.models.EntityInterface
:ivar graph_layout:
:vartype graph_layout: ~flow.models.GraphLayout
:ivar created_by:
:vartype created_by: ~flow.models.CreatedBy
:ivar last_updated_by:
:vartype last_updated_by: ~flow.models.CreatedBy
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar extended_properties: This is a dictionary.
:vartype extended_properties: dict[str, str]
:ivar parent_sub_graph_module_ids:
:vartype parent_sub_graph_module_ids: list[str]
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'graph_module_dtos': {'key': 'graphModuleDtos', 'type': '[ModuleDto]'},
'graph_data_sources': {'key': 'graphDataSources', 'type': '[DataInfo]'},
'graphs': {'key': 'graphs', 'type': '{PipelineGraph}'},
'graph_drafts': {'key': 'graphDrafts', 'type': '{PipelineGraph}'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'referenced_node_id': {'key': 'referencedNodeId', 'type': 'str'},
'pipeline_run_setting_parameters': {'key': 'pipelineRunSettingParameters', 'type': '[RunSettingParameter]'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'real_time_endpoint_info': {'key': 'realTimeEndpointInfo', 'type': 'RealTimeEndpointInfo'},
'node_telemetry_meta_infos': {'key': 'nodeTelemetryMetaInfos', 'type': '[NodeTelemetryMetaInfo]'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'module_nodes': {'key': 'moduleNodes', 'type': '[GraphModuleNode]'},
'dataset_nodes': {'key': 'datasetNodes', 'type': '[GraphDatasetNode]'},
'sub_graph_nodes': {'key': 'subGraphNodes', 'type': '[GraphReferenceNode]'},
'control_reference_nodes': {'key': 'controlReferenceNodes', 'type': '[GraphControlReferenceNode]'},
'control_nodes': {'key': 'controlNodes', 'type': '[GraphControlNode]'},
'edges': {'key': 'edges', 'type': '[GraphEdge]'},
'entity_interface': {'key': 'entityInterface', 'type': 'EntityInterface'},
'graph_layout': {'key': 'graphLayout', 'type': 'GraphLayout'},
'created_by': {'key': 'createdBy', 'type': 'CreatedBy'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'CreatedBy'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'extended_properties': {'key': 'extendedProperties', 'type': '{str}'},
'parent_sub_graph_module_ids': {'key': 'parentSubGraphModuleIds', 'type': '[str]'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
graph_module_dtos: Optional[List["ModuleDto"]] = None,
graph_data_sources: Optional[List["DataInfo"]] = None,
graphs: Optional[Dict[str, "PipelineGraph"]] = None,
graph_drafts: Optional[Dict[str, "PipelineGraph"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
referenced_node_id: Optional[str] = None,
pipeline_run_setting_parameters: Optional[List["RunSettingParameter"]] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
real_time_endpoint_info: Optional["RealTimeEndpointInfo"] = None,
node_telemetry_meta_infos: Optional[List["NodeTelemetryMetaInfo"]] = None,
graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
module_nodes: Optional[List["GraphModuleNode"]] = None,
dataset_nodes: Optional[List["GraphDatasetNode"]] = None,
sub_graph_nodes: Optional[List["GraphReferenceNode"]] = None,
control_reference_nodes: Optional[List["GraphControlReferenceNode"]] = None,
control_nodes: Optional[List["GraphControlNode"]] = None,
edges: Optional[List["GraphEdge"]] = None,
entity_interface: Optional["EntityInterface"] = None,
graph_layout: Optional["GraphLayout"] = None,
created_by: Optional["CreatedBy"] = None,
last_updated_by: Optional["CreatedBy"] = None,
default_compute: Optional["ComputeSetting"] = None,
default_datastore: Optional["DatastoreSetting"] = None,
default_cloud_priority: Optional["CloudPrioritySetting"] = None,
extended_properties: Optional[Dict[str, str]] = None,
parent_sub_graph_module_ids: Optional[List[str]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword graph_module_dtos:
:paramtype graph_module_dtos: list[~flow.models.ModuleDto]
:keyword graph_data_sources:
:paramtype graph_data_sources: list[~flow.models.DataInfo]
:keyword graphs: This is a dictionary.
:paramtype graphs: dict[str, ~flow.models.PipelineGraph]
:keyword graph_drafts: This is a dictionary.
:paramtype graph_drafts: dict[str, ~flow.models.PipelineGraph]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword referenced_node_id:
:paramtype referenced_node_id: str
:keyword pipeline_run_setting_parameters:
:paramtype pipeline_run_setting_parameters: list[~flow.models.RunSettingParameter]
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword real_time_endpoint_info:
:paramtype real_time_endpoint_info: ~flow.models.RealTimeEndpointInfo
:keyword node_telemetry_meta_infos:
:paramtype node_telemetry_meta_infos: list[~flow.models.NodeTelemetryMetaInfo]
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword module_nodes:
:paramtype module_nodes: list[~flow.models.GraphModuleNode]
:keyword dataset_nodes:
:paramtype dataset_nodes: list[~flow.models.GraphDatasetNode]
:keyword sub_graph_nodes:
:paramtype sub_graph_nodes: list[~flow.models.GraphReferenceNode]
:keyword control_reference_nodes:
:paramtype control_reference_nodes: list[~flow.models.GraphControlReferenceNode]
:keyword control_nodes:
:paramtype control_nodes: list[~flow.models.GraphControlNode]
:keyword edges:
:paramtype edges: list[~flow.models.GraphEdge]
:keyword entity_interface:
:paramtype entity_interface: ~flow.models.EntityInterface
:keyword graph_layout:
:paramtype graph_layout: ~flow.models.GraphLayout
:keyword created_by:
:paramtype created_by: ~flow.models.CreatedBy
:keyword last_updated_by:
:paramtype last_updated_by: ~flow.models.CreatedBy
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword extended_properties: This is a dictionary.
:paramtype extended_properties: dict[str, str]
:keyword parent_sub_graph_module_ids:
:paramtype parent_sub_graph_module_ids: list[str]
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineGraph, self).__init__(**kwargs)
self.graph_module_dtos = graph_module_dtos
self.graph_data_sources = graph_data_sources
self.graphs = graphs
self.graph_drafts = graph_drafts
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.sub_pipelines_info = sub_pipelines_info
self.referenced_node_id = referenced_node_id
self.pipeline_run_setting_parameters = pipeline_run_setting_parameters
self.pipeline_run_settings = pipeline_run_settings
self.real_time_endpoint_info = real_time_endpoint_info
self.node_telemetry_meta_infos = node_telemetry_meta_infos
self.graph_components_mode = graph_components_mode
self.module_nodes = module_nodes
self.dataset_nodes = dataset_nodes
self.sub_graph_nodes = sub_graph_nodes
self.control_reference_nodes = control_reference_nodes
self.control_nodes = control_nodes
self.edges = edges
self.entity_interface = entity_interface
self.graph_layout = graph_layout
self.created_by = created_by
self.last_updated_by = last_updated_by
self.default_compute = default_compute
self.default_datastore = default_datastore
self.default_cloud_priority = default_cloud_priority
self.extended_properties = extended_properties
self.parent_sub_graph_module_ids = parent_sub_graph_module_ids
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
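
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# PipelineGraph is recursive: `graphs` and `graph_drafts` map names to nested
# PipelineGraph instances, and serialization recurses through them. The graph
# name and node id below are hypothetical.
def _example_nested_pipeline_graph():
    subgraph = PipelineGraph(referenced_node_id="node-1")  # hypothetical id
    root = PipelineGraph(graphs={"sub": subgraph})
    body = root.serialize()
    assert body["graphs"]["sub"]["referencedNodeId"] == "node-1"
    return body
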
class PipelineInput(msrest.serialization.Model):
"""PipelineInput.
:ivar data:
:vartype data: ~flow.models.InputData
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'InputData'},
}
def __init__(
self,
*,
data: Optional["InputData"] = None,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.InputData
"""
super(PipelineInput, self).__init__(**kwargs)
self.data = data

class PipelineJob(msrest.serialization.Model):
"""PipelineJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
default value is None.
:vartype pipeline_job_type: str
:ivar pipeline:
:vartype pipeline: ~flow.models.Pipeline
:ivar compute_id:
:vartype compute_id: str
:ivar run_id:
:vartype run_id: str
:ivar settings: Anything.
:vartype settings: any
:ivar component_jobs: This is a dictionary.
:vartype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.JobInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.JobOutput]
:ivar bindings:
:vartype bindings: list[~flow.models.Binding]
:ivar jobs: This is a dictionary.
:vartype jobs: dict[str, any]
:ivar input_bindings: This is a dictionary.
:vartype input_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_bindings: This is a dictionary.
:vartype output_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar source_job_id:
:vartype source_job_id: str
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'pipeline_job_type': {'key': 'pipelineJobType', 'type': 'str'},
'pipeline': {'key': 'pipeline', 'type': 'Pipeline'},
'compute_id': {'key': 'computeId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'settings': {'key': 'settings', 'type': 'object'},
'component_jobs': {'key': 'componentJobs', 'type': '{MfeInternalV20211001ComponentJob}'},
'inputs': {'key': 'inputs', 'type': '{JobInput}'},
'outputs': {'key': 'outputs', 'type': '{JobOutput}'},
'bindings': {'key': 'bindings', 'type': '[Binding]'},
'jobs': {'key': 'jobs', 'type': '{object}'},
'input_bindings': {'key': 'inputBindings', 'type': '{InputDataBinding}'},
'output_bindings': {'key': 'outputBindings', 'type': '{OutputDataBinding}'},
'source_job_id': {'key': 'sourceJobId', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
job_type: Optional[Union[str, "JobType"]] = None,
pipeline_job_type: Optional[str] = None,
pipeline: Optional["Pipeline"] = None,
compute_id: Optional[str] = None,
run_id: Optional[str] = None,
settings: Optional[Any] = None,
component_jobs: Optional[Dict[str, "MfeInternalV20211001ComponentJob"]] = None,
inputs: Optional[Dict[str, "JobInput"]] = None,
outputs: Optional[Dict[str, "JobOutput"]] = None,
bindings: Optional[List["Binding"]] = None,
jobs: Optional[Dict[str, Any]] = None,
input_bindings: Optional[Dict[str, "InputDataBinding"]] = None,
output_bindings: Optional[Dict[str, "OutputDataBinding"]] = None,
source_job_id: Optional[str] = None,
provisioning_state: Optional[Union[str, "JobProvisioningState"]] = None,
parent_job_name: Optional[str] = None,
display_name: Optional[str] = None,
experiment_name: Optional[str] = None,
status: Optional[Union[str, "JobStatus"]] = None,
interaction_endpoints: Optional[Dict[str, "JobEndpoint"]] = None,
identity: Optional["MfeInternalIdentityConfiguration"] = None,
compute: Optional["ComputeConfiguration"] = None,
priority: Optional[int] = None,
output: Optional["JobOutputArtifacts"] = None,
is_archived: Optional[bool] = None,
schedule: Optional["ScheduleBase"] = None,
component_id: Optional[str] = None,
notification_setting: Optional["NotificationSetting"] = None,
secrets_configuration: Optional[Dict[str, "MfeInternalSecretConfiguration"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword pipeline_job_type: The only acceptable values to pass in are None and "AzureML". The
default value is None.
:paramtype pipeline_job_type: str
:keyword pipeline:
:paramtype pipeline: ~flow.models.Pipeline
:keyword compute_id:
:paramtype compute_id: str
:keyword run_id:
:paramtype run_id: str
:keyword settings: Anything.
:paramtype settings: any
:keyword component_jobs: This is a dictionary.
:paramtype component_jobs: dict[str, ~flow.models.MfeInternalV20211001ComponentJob]
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.JobInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.JobOutput]
:keyword bindings:
:paramtype bindings: list[~flow.models.Binding]
:keyword jobs: This is a dictionary.
:paramtype jobs: dict[str, any]
:keyword input_bindings: This is a dictionary.
:paramtype input_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_bindings: This is a dictionary.
:paramtype output_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword source_job_id:
:paramtype source_job_id: str
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(PipelineJob, self).__init__(**kwargs)
self.job_type = job_type
self.pipeline_job_type = pipeline_job_type
self.pipeline = pipeline
self.compute_id = compute_id
self.run_id = run_id
self.settings = settings
self.component_jobs = component_jobs
self.inputs = inputs
self.outputs = outputs
self.bindings = bindings
self.jobs = jobs
self.input_bindings = input_bindings
self.output_bindings = output_bindings
self.source_job_id = source_job_id
self.provisioning_state = provisioning_state
self.parent_job_name = parent_job_name
self.display_name = display_name
self.experiment_name = experiment_name
self.status = status
self.interaction_endpoints = interaction_endpoints
self.identity = identity
self.compute = compute
self.priority = priority
self.output = output
self.is_archived = is_archived
self.schedule = schedule
self.component_id = component_id
self.notification_setting = notification_setting
self.secrets_configuration = secrets_configuration
self.description = description
self.tags = tags
self.properties = properties
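
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# `Model.from_dict` is more forgiving than `deserialize`: its default key
# extractors match both the Python attribute names and the camelCase REST keys
# (case-insensitively). The job payload below is hypothetical.
def _example_pipeline_job_from_dict():
    payload = {
        "jobType": "Pipeline",
        "displayName": "nightly-train",  # hypothetical
        "status": "Completed",
        "tags": {"owner": "ml-team"},
    }
    job = PipelineJob.from_dict(payload)
    assert job.display_name == "nightly-train"
    return job
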
class PipelineJobRuntimeBasicSettings(msrest.serialization.Model):
"""PipelineJobRuntimeBasicSettings.
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar trigger_time_string:
:vartype trigger_time_string: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
"""
_attribute_map = {
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'trigger_time_string': {'key': 'triggerTimeString', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
}
def __init__(
self,
*,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
experiment_name: Optional[str] = None,
pipeline_job_name: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
display_name: Optional[str] = None,
description: Optional[str] = None,
trigger_time_string: Optional[str] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
**kwargs
):
"""
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword trigger_time_string:
:paramtype trigger_time_string: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
"""
super(PipelineJobRuntimeBasicSettings, self).__init__(**kwargs)
self.pipeline_run_settings = pipeline_run_settings
self.experiment_name = experiment_name
self.pipeline_job_name = pipeline_job_name
self.tags = tags
self.display_name = display_name
self.description = description
self.trigger_time_string = trigger_time_string
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
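
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# PipelineJobRuntimeBasicSettings carries the per-submission knobs; the
# assignment dictionaries are keyed by parameter/output name, and
# `pipeline_parameters` holds string values. All names below are hypothetical.
def _example_runtime_basic_settings():
    settings = PipelineJobRuntimeBasicSettings(
        experiment_name="exp-demo",  # hypothetical
        pipeline_parameters={"learning_rate": "0.01"},  # values are strings
        tags={"trigger": "manual"},
    )
    return settings.serialize()
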
class PipelineJobScheduleDto(msrest.serialization.Model):
"""PipelineJobScheduleDto.
:ivar system_data:
:vartype system_data: ~flow.models.SystemData
:ivar name:
:vartype name: str
:ivar pipeline_job_name:
:vartype pipeline_job_name: str
:ivar pipeline_job_runtime_settings:
:vartype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:ivar display_name:
:vartype display_name: str
:ivar trigger_type: Possible values include: "Recurrence", "Cron".
:vartype trigger_type: str or ~flow.models.TriggerType
:ivar recurrence:
:vartype recurrence: ~flow.models.Recurrence
:ivar cron:
:vartype cron: ~flow.models.Cron
:ivar status: Possible values include: "Enabled", "Disabled".
:vartype status: str or ~flow.models.ScheduleStatus
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_job_name': {'key': 'pipelineJobName', 'type': 'str'},
'pipeline_job_runtime_settings': {'key': 'pipelineJobRuntimeSettings', 'type': 'PipelineJobRuntimeBasicSettings'},
'display_name': {'key': 'displayName', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'str'},
'recurrence': {'key': 'recurrence', 'type': 'Recurrence'},
'cron': {'key': 'cron', 'type': 'Cron'},
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
system_data: Optional["SystemData"] = None,
name: Optional[str] = None,
pipeline_job_name: Optional[str] = None,
pipeline_job_runtime_settings: Optional["PipelineJobRuntimeBasicSettings"] = None,
display_name: Optional[str] = None,
trigger_type: Optional[Union[str, "TriggerType"]] = None,
recurrence: Optional["Recurrence"] = None,
cron: Optional["Cron"] = None,
status: Optional[Union[str, "ScheduleStatus"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword system_data:
:paramtype system_data: ~flow.models.SystemData
:keyword name:
:paramtype name: str
:keyword pipeline_job_name:
:paramtype pipeline_job_name: str
:keyword pipeline_job_runtime_settings:
:paramtype pipeline_job_runtime_settings: ~flow.models.PipelineJobRuntimeBasicSettings
:keyword display_name:
:paramtype display_name: str
:keyword trigger_type: Possible values include: "Recurrence", "Cron".
:paramtype trigger_type: str or ~flow.models.TriggerType
:keyword recurrence:
:paramtype recurrence: ~flow.models.Recurrence
:keyword cron:
:paramtype cron: ~flow.models.Cron
:keyword status: Possible values include: "Enabled", "Disabled".
:paramtype status: str or ~flow.models.ScheduleStatus
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(PipelineJobScheduleDto, self).__init__(**kwargs)
self.system_data = system_data
self.name = name
self.pipeline_job_name = pipeline_job_name
self.pipeline_job_runtime_settings = pipeline_job_runtime_settings
self.display_name = display_name
self.trigger_type = trigger_type
self.recurrence = recurrence
self.cron = cron
self.status = status
self.description = description
self.tags = tags
self.properties = properties
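
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# Composing a PipelineJobScheduleDto: the runtime settings nest as a model, and
# `trigger_type`/`status` accept either plain strings or the corresponding enum
# members. A real "Cron" trigger would also set `cron` to a Cron model (defined
# elsewhere in this module); it is omitted here. Names are hypothetical.
def _example_pipeline_job_schedule():
    schedule = PipelineJobScheduleDto(
        name="nightly",  # hypothetical
        trigger_type="Cron",
        status="Enabled",
        pipeline_job_runtime_settings=PipelineJobRuntimeBasicSettings(
            experiment_name="exp-demo",  # hypothetical
        ),
    )
    return schedule.serialize()
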
class PipelineOutput(msrest.serialization.Model):
"""PipelineOutput.
:ivar data:
:vartype data: ~flow.models.MfeInternalOutputData
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'MfeInternalOutputData'},
}
def __init__(
self,
*,
data: Optional["MfeInternalOutputData"] = None,
**kwargs
):
"""
:keyword data:
:paramtype data: ~flow.models.MfeInternalOutputData
"""
super(PipelineOutput, self).__init__(**kwargs)
self.data = data

class PipelineRun(msrest.serialization.Model):
"""PipelineRun.
:ivar pipeline_id:
:vartype pipeline_id: str
:ivar run_source:
:vartype run_source: str
:ivar run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
:vartype run_type: str or ~flow.models.RunType
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar total_steps:
:vartype total_steps: int
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar user_alias:
:vartype user_alias: str
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar continue_run_on_failed_optional_input:
:vartype continue_run_on_failed_optional_input: bool
:ivar default_compute:
:vartype default_compute: ~flow.models.ComputeSetting
:ivar default_datastore:
:vartype default_datastore: ~flow.models.DatastoreSetting
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar pipeline_timeout_seconds:
:vartype pipeline_timeout_seconds: int
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar identity_config:
:vartype identity_config: ~flow.models.IdentitySetting
:ivar description:
:vartype description: str
:ivar display_name:
:vartype display_name: str
:ivar run_number:
:vartype run_number: int
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar graph_id:
:vartype graph_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
:ivar submitted_by:
:vartype submitted_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
}
_attribute_map = {
'pipeline_id': {'key': 'pipelineId', 'type': 'str'},
'run_source': {'key': 'runSource', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'total_steps': {'key': 'totalSteps', 'type': 'int'},
'logs': {'key': 'logs', 'type': '{str}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'continue_run_on_failed_optional_input': {'key': 'continueRunOnFailedOptionalInput', 'type': 'bool'},
'default_compute': {'key': 'defaultCompute', 'type': 'ComputeSetting'},
'default_datastore': {'key': 'defaultDatastore', 'type': 'DatastoreSetting'},
'default_cloud_priority': {'key': 'defaultCloudPriority', 'type': 'CloudPrioritySetting'},
'pipeline_timeout_seconds': {'key': 'pipelineTimeoutSeconds', 'type': 'int'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'identity_config': {'key': 'identityConfig', 'type': 'IdentitySetting'},
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
'submitted_by': {'key': 'submittedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
pipeline_id: Optional[str] = None,
run_source: Optional[str] = None,
run_type: Optional[Union[str, "RunType"]] = None,
parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignment: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
total_steps: Optional[int] = None,
logs: Optional[Dict[str, str]] = None,
user_alias: Optional[str] = None,
enforce_rerun: Optional[bool] = None,
continue_run_on_failed_optional_input: Optional[bool] = None,
default_compute: Optional["ComputeSetting"] = None,
default_datastore: Optional["DatastoreSetting"] = None,
default_cloud_priority: Optional["CloudPrioritySetting"] = None,
pipeline_timeout_seconds: Optional[int] = None,
continue_run_on_step_failure: Optional[bool] = None,
identity_config: Optional["IdentitySetting"] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
run_number: Optional[int] = None,
status_code: Optional[Union[str, "PipelineStatusCode"]] = None,
run_status: Optional[Union[str, "RunStatus"]] = None,
status_detail: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
graph_id: Optional[str] = None,
experiment_id: Optional[str] = None,
experiment_name: Optional[str] = None,
is_experiment_archived: Optional[bool] = None,
submitted_by: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
step_tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
aether_start_time: Optional[datetime.datetime] = None,
aether_end_time: Optional[datetime.datetime] = None,
run_history_start_time: Optional[datetime.datetime] = None,
run_history_end_time: Optional[datetime.datetime] = None,
unique_child_run_compute_targets: Optional[List[str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword pipeline_id:
:paramtype pipeline_id: str
:keyword run_source:
:paramtype run_source: str
:keyword run_type: Possible values include: "HTTP", "SDK", "Schedule", "Portal".
:paramtype run_type: str or ~flow.models.RunType
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword total_steps:
:paramtype total_steps: int
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword user_alias:
:paramtype user_alias: str
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword continue_run_on_failed_optional_input:
:paramtype continue_run_on_failed_optional_input: bool
:keyword default_compute:
:paramtype default_compute: ~flow.models.ComputeSetting
:keyword default_datastore:
:paramtype default_datastore: ~flow.models.DatastoreSetting
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword pipeline_timeout_seconds:
:paramtype pipeline_timeout_seconds: int
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword identity_config:
:paramtype identity_config: ~flow.models.IdentitySetting
:keyword description:
:paramtype description: str
:keyword display_name:
:paramtype display_name: str
:keyword run_number:
:paramtype run_number: int
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword graph_id:
:paramtype graph_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
:keyword submitted_by:
:paramtype submitted_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineRun, self).__init__(**kwargs)
self.pipeline_id = pipeline_id
self.run_source = run_source
self.run_type = run_type
self.parameters = parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignment = data_set_definition_value_assignment
self.asset_output_settings_assignments = asset_output_settings_assignments
self.total_steps = total_steps
self.logs = logs
self.user_alias = user_alias
self.enforce_rerun = enforce_rerun
self.continue_run_on_failed_optional_input = continue_run_on_failed_optional_input
self.default_compute = default_compute
self.default_datastore = default_datastore
self.default_cloud_priority = default_cloud_priority
self.pipeline_timeout_seconds = pipeline_timeout_seconds
self.continue_run_on_step_failure = continue_run_on_step_failure
self.identity_config = identity_config
self.description = description
self.display_name = display_name
self.run_number = run_number
self.status_code = status_code
self.run_status = run_status
self.status_detail = status_detail
self.start_time = start_time
self.end_time = end_time
self.graph_id = graph_id
self.experiment_id = experiment_id
self.experiment_name = experiment_name
self.is_experiment_archived = is_experiment_archived
self.submitted_by = submitted_by
self.tags = tags
self.step_tags = step_tags
self.properties = properties
self.aether_start_time = aether_start_time
self.aether_end_time = aether_end_time
self.run_history_start_time = run_history_start_time
self.run_history_end_time = run_history_end_time
self.unique_child_run_compute_targets = unique_child_run_compute_targets
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
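
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# PipelineRun declares a `_validation` constraint: unique_child_run_compute_targets
# must not contain duplicates. msrest's `Model.validate()` checks the constraints
# in `_validation` and returns the violations it finds. Values are hypothetical.
def _example_pipeline_run_validation():
    run = PipelineRun(
        run_source="SDK",
        unique_child_run_compute_targets=["cpu-cluster", "cpu-cluster"],  # duplicate
    )
    return run.validate()  # expected to flag the 'unique' violation
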
class PipelineRunGraphDetail(msrest.serialization.Model):
"""PipelineRunGraphDetail.
:ivar graph:
:vartype graph: ~flow.models.PipelineGraph
:ivar graph_nodes_status: This is a dictionary.
:vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
"""
_attribute_map = {
'graph': {'key': 'graph', 'type': 'PipelineGraph'},
'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
}
def __init__(
self,
*,
graph: Optional["PipelineGraph"] = None,
graph_nodes_status: Optional[Dict[str, "GraphNodeStatusInfo"]] = None,
**kwargs
):
"""
:keyword graph:
:paramtype graph: ~flow.models.PipelineGraph
:keyword graph_nodes_status: This is a dictionary.
:paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
"""
super(PipelineRunGraphDetail, self).__init__(**kwargs)
self.graph = graph
self.graph_nodes_status = graph_nodes_status
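
# --- Illustrative usage sketch (hand-written; not part of the AutoRest output) ---
# A small consumer of a PipelineRunGraphDetail returned by the service:
# `graph_nodes_status` maps node ids to GraphNodeStatusInfo models (defined
# elsewhere in this module) and may be None when absent from the response.
def _example_list_graph_node_ids(detail: "PipelineRunGraphDetail") -> list:
    return sorted((detail.graph_nodes_status or {}).keys())
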
class PipelineRunGraphStatus(msrest.serialization.Model):
"""PipelineRunGraphStatus.
:ivar status:
:vartype status: ~flow.models.PipelineStatus
:ivar graph_nodes_status: This is a dictionary.
:vartype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
:ivar experiment_id:
:vartype experiment_id: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'PipelineStatus'},
'graph_nodes_status': {'key': 'graphNodesStatus', 'type': '{GraphNodeStatusInfo}'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
}
def __init__(
self,
*,
status: Optional["PipelineStatus"] = None,
graph_nodes_status: Optional[Dict[str, "GraphNodeStatusInfo"]] = None,
experiment_id: Optional[str] = None,
is_experiment_archived: Optional[bool] = None,
**kwargs
):
"""
:keyword status:
:paramtype status: ~flow.models.PipelineStatus
:keyword graph_nodes_status: This is a dictionary.
:paramtype graph_nodes_status: dict[str, ~flow.models.GraphNodeStatusInfo]
:keyword experiment_id:
:paramtype experiment_id: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
"""
super(PipelineRunGraphStatus, self).__init__(**kwargs)
self.status = status
self.graph_nodes_status = graph_nodes_status
self.experiment_id = experiment_id
self.is_experiment_archived = is_experiment_archived

class PipelineRunProfile(msrest.serialization.Model):
"""PipelineRunProfile.
:ivar run_id:
:vartype run_id: str
:ivar node_id:
:vartype node_id: str
:ivar run_url:
:vartype run_url: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar description:
:vartype description: str
:ivar status:
:vartype status: ~flow.models.PipelineRunStatus
:ivar create_time:
:vartype create_time: long
:ivar start_time:
:vartype start_time: long
:ivar end_time:
:vartype end_time: long
:ivar profiling_time:
:vartype profiling_time: long
:ivar step_runs_profile:
:vartype step_runs_profile: list[~flow.models.StepRunProfile]
:ivar sub_pipeline_run_profile:
:vartype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'run_url': {'key': 'runUrl', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'status': {'key': 'status', 'type': 'PipelineRunStatus'},
'create_time': {'key': 'createTime', 'type': 'long'},
'start_time': {'key': 'startTime', 'type': 'long'},
'end_time': {'key': 'endTime', 'type': 'long'},
'profiling_time': {'key': 'profilingTime', 'type': 'long'},
'step_runs_profile': {'key': 'stepRunsProfile', 'type': '[StepRunProfile]'},
'sub_pipeline_run_profile': {'key': 'subPipelineRunProfile', 'type': '[PipelineRunProfile]'},
}
def __init__(
self,
*,
run_id: Optional[str] = None,
node_id: Optional[str] = None,
run_url: Optional[str] = None,
experiment_name: Optional[str] = None,
experiment_id: Optional[str] = None,
description: Optional[str] = None,
status: Optional["PipelineRunStatus"] = None,
create_time: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
profiling_time: Optional[int] = None,
step_runs_profile: Optional[List["StepRunProfile"]] = None,
sub_pipeline_run_profile: Optional[List["PipelineRunProfile"]] = None,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword run_url:
:paramtype run_url: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword description:
:paramtype description: str
:keyword status:
:paramtype status: ~flow.models.PipelineRunStatus
:keyword create_time:
:paramtype create_time: long
:keyword start_time:
:paramtype start_time: long
:keyword end_time:
:paramtype end_time: long
:keyword profiling_time:
:paramtype profiling_time: long
:keyword step_runs_profile:
:paramtype step_runs_profile: list[~flow.models.StepRunProfile]
:keyword sub_pipeline_run_profile:
:paramtype sub_pipeline_run_profile: list[~flow.models.PipelineRunProfile]
"""
super(PipelineRunProfile, self).__init__(**kwargs)
self.run_id = run_id
self.node_id = node_id
self.run_url = run_url
self.experiment_name = experiment_name
self.experiment_id = experiment_id
self.description = description
self.status = status
self.create_time = create_time
self.start_time = start_time
self.end_time = end_time
self.profiling_time = profiling_time
self.step_runs_profile = step_runs_profile
self.sub_pipeline_run_profile = sub_pipeline_run_profile
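
# Example (illustrative sketch, not generated by AutoRest): a PipelineRunProfile
# can nest further PipelineRunProfile instances through
# ``sub_pipeline_run_profile``, which is how sub-pipeline timings are modelled.
# The time fields are serialized as ``long``; epoch milliseconds is an assumed
# unit here.
#
#     child = PipelineRunProfile(run_id="child-run", create_time=1700000000000)
#     parent = PipelineRunProfile(
#         run_id="root-run",
#         status=PipelineRunStatus(status_code="Finished"),
#         sub_pipeline_run_profile=[child],
#     )
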
class PipelineRunStatus(msrest.serialization.Model):
"""PipelineRunStatus.
:ivar status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype status_code: str or ~flow.models.PipelineRunStatusCode
:ivar status_detail:
:vartype status_detail: str
:ivar creation_time:
:vartype creation_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
"""
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
status_code: Optional[Union[str, "PipelineRunStatusCode"]] = None,
status_detail: Optional[str] = None,
creation_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword status_code: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:paramtype status_code: str or ~flow.models.PipelineRunStatusCode
:keyword status_detail:
:paramtype status_detail: str
:keyword creation_time:
:paramtype creation_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(PipelineRunStatus, self).__init__(**kwargs)
self.status_code = status_code
self.status_detail = status_detail
self.creation_time = creation_time
self.end_time = end_time
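
# Example (illustrative, not part of the generated code): ``status_code``
# accepts either a plain string or a PipelineRunStatusCode value, and the
# datetime fields are serialized as ISO-8601 per the attribute map above.
#
#     run_status = PipelineRunStatus(
#         status_code="Finished",
#         status_detail="Completed without errors",
#         creation_time=datetime.datetime(2023, 1, 2, 3, 4, 5),
#         end_time=datetime.datetime(2023, 1, 2, 3, 9, 5),
#     )
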
class PipelineRunStepDetails(msrest.serialization.Model):
"""PipelineRunStepDetails.
:ivar run_id:
:vartype run_id: str
:ivar target:
:vartype target: str
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_reused:
:vartype is_reused: bool
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar snapshot_info:
:vartype snapshot_info: ~flow.models.SnapshotInfo
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
"""
_validation = {
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'logs': {'key': 'logs', 'type': '{str}'},
'outputs': {'key': 'outputs', 'type': '{str}'},
'snapshot_info': {'key': 'snapshotInfo', 'type': 'SnapshotInfo'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
}
def __init__(
self,
*,
run_id: Optional[str] = None,
target: Optional[str] = None,
status: Optional[Union[str, "RunStatus"]] = None,
status_detail: Optional[str] = None,
parent_run_id: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
is_reused: Optional[bool] = None,
logs: Optional[Dict[str, str]] = None,
outputs: Optional[Dict[str, str]] = None,
snapshot_info: Optional["SnapshotInfo"] = None,
input_datasets: Optional[List["DatasetLineage"]] = None,
output_datasets: Optional[List["OutputDatasetLineage"]] = None,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword target:
:paramtype target: str
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword is_reused:
:paramtype is_reused: bool
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword snapshot_info:
:paramtype snapshot_info: ~flow.models.SnapshotInfo
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
"""
super(PipelineRunStepDetails, self).__init__(**kwargs)
self.run_id = run_id
self.target = target
self.status = status
self.status_detail = status_detail
self.parent_run_id = parent_run_id
self.start_time = start_time
self.end_time = end_time
self.is_reused = is_reused
self.logs = logs
self.outputs = outputs
self.snapshot_info = snapshot_info
self.input_datasets = input_datasets
self.output_datasets = output_datasets
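
# Example (illustrative sketch; paths and log names are made up): ``logs`` and
# ``outputs`` are plain str->str dictionaries, while ``input_datasets`` and
# ``output_datasets`` are validated as unique lists (see ``_validation`` above).
#
#     step = PipelineRunStepDetails(
#         run_id="step-run-1",
#         status="Completed",
#         is_reused=False,
#         logs={"azureml-logs/driver_log.txt": "https://.../driver_log.txt"},
#         outputs={"output_dir": "azureml/step-run-1/output_dir"},
#     )
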
class PipelineRunSummary(msrest.serialization.Model):
"""PipelineRunSummary.
:ivar description:
:vartype description: str
:ivar display_name:
:vartype display_name: str
:ivar run_number:
:vartype run_number: int
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar graph_id:
:vartype graph_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar is_experiment_archived:
:vartype is_experiment_archived: bool
:ivar submitted_by:
:vartype submitted_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar aether_start_time:
:vartype aether_start_time: ~datetime.datetime
:ivar aether_end_time:
:vartype aether_end_time: ~datetime.datetime
:ivar run_history_start_time:
:vartype run_history_start_time: ~datetime.datetime
:ivar run_history_end_time:
:vartype run_history_end_time: ~datetime.datetime
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
}
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'is_experiment_archived': {'key': 'isExperimentArchived', 'type': 'bool'},
'submitted_by': {'key': 'submittedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'aether_start_time': {'key': 'aetherStartTime', 'type': 'iso-8601'},
'aether_end_time': {'key': 'aetherEndTime', 'type': 'iso-8601'},
'run_history_start_time': {'key': 'runHistoryStartTime', 'type': 'iso-8601'},
'run_history_end_time': {'key': 'runHistoryEndTime', 'type': 'iso-8601'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
description: Optional[str] = None,
display_name: Optional[str] = None,
run_number: Optional[int] = None,
status_code: Optional[Union[str, "PipelineStatusCode"]] = None,
run_status: Optional[Union[str, "RunStatus"]] = None,
status_detail: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
graph_id: Optional[str] = None,
experiment_id: Optional[str] = None,
experiment_name: Optional[str] = None,
is_experiment_archived: Optional[bool] = None,
submitted_by: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
step_tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
aether_start_time: Optional[datetime.datetime] = None,
aether_end_time: Optional[datetime.datetime] = None,
run_history_start_time: Optional[datetime.datetime] = None,
run_history_end_time: Optional[datetime.datetime] = None,
unique_child_run_compute_targets: Optional[List[str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword description:
:paramtype description: str
:keyword display_name:
:paramtype display_name: str
:keyword run_number:
:paramtype run_number: int
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword graph_id:
:paramtype graph_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword is_experiment_archived:
:paramtype is_experiment_archived: bool
:keyword submitted_by:
:paramtype submitted_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword aether_start_time:
:paramtype aether_start_time: ~datetime.datetime
:keyword aether_end_time:
:paramtype aether_end_time: ~datetime.datetime
:keyword run_history_start_time:
:paramtype run_history_start_time: ~datetime.datetime
:keyword run_history_end_time:
:paramtype run_history_end_time: ~datetime.datetime
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineRunSummary, self).__init__(**kwargs)
self.description = description
self.display_name = display_name
self.run_number = run_number
self.status_code = status_code
self.run_status = run_status
self.status_detail = status_detail
self.start_time = start_time
self.end_time = end_time
self.graph_id = graph_id
self.experiment_id = experiment_id
self.experiment_name = experiment_name
self.is_experiment_archived = is_experiment_archived
self.submitted_by = submitted_by
self.tags = tags
self.step_tags = step_tags
self.properties = properties
self.aether_start_time = aether_start_time
self.aether_end_time = aether_end_time
self.run_history_start_time = run_history_start_time
self.run_history_end_time = run_history_end_time
self.unique_child_run_compute_targets = unique_child_run_compute_targets
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
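
# Example (illustrative): like every msrest model in this module, the summary
# round-trips between Python attributes and the wire format using the
# ``_attribute_map`` keys, e.g. ``display_name`` <-> ``displayName``.
#
#     summary = PipelineRunSummary(display_name="nightly", run_number=7)
#     wire = summary.serialize()    # {'displayName': 'nightly', 'runNumber': 7}
#     again = PipelineRunSummary.deserialize(wire)
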
class PipelineStatus(msrest.serialization.Model):
"""PipelineStatus.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:vartype status_code: str or ~flow.models.PipelineStatusCode
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar is_terminal_state:
:vartype is_terminal_state: bool
"""
_validation = {
'is_terminal_state': {'readonly': True},
}
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'is_terminal_state': {'key': 'isTerminalState', 'type': 'bool'},
}
def __init__(
self,
*,
status_code: Optional[Union[str, "PipelineStatusCode"]] = None,
run_status: Optional[Union[str, "RunStatus"]] = None,
status_detail: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword status_code: Possible values include: "NotStarted", "InDraft", "Preparing", "Running",
"Failed", "Finished", "Canceled", "Throttled", "Unknown".
:paramtype status_code: str or ~flow.models.PipelineStatusCode
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(PipelineStatus, self).__init__(**kwargs)
self.status_code = status_code
self.run_status = run_status
self.status_detail = status_detail
self.start_time = start_time
self.end_time = end_time
self.is_terminal_state = None
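
# Note (illustrative): ``is_terminal_state`` is marked ``readonly`` in
# ``_validation``, so it is only populated from server responses; the
# constructor above always initializes it to None, and ``serialize()`` omits
# readonly attributes from outgoing requests unless ``keep_readonly=True``
# is passed.
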
class PipelineStepRun(msrest.serialization.Model):
"""PipelineStepRun.
:ivar step_name:
:vartype step_name: str
:ivar run_number:
:vartype run_number: int
:ivar run_id:
:vartype run_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype run_status: str or ~flow.models.RunStatus
:ivar compute_target:
:vartype compute_target: str
:ivar compute_type:
:vartype compute_type: str
:ivar run_type:
:vartype run_type: str
:ivar step_type:
:vartype step_type: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar is_reused:
:vartype is_reused: bool
:ivar display_name:
:vartype display_name: str
"""
_attribute_map = {
'step_name': {'key': 'stepName', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'run_id': {'key': 'runId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'run_status': {'key': 'runStatus', 'type': 'str'},
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'step_type': {'key': 'stepType', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'display_name': {'key': 'displayName', 'type': 'str'},
}
def __init__(
self,
*,
step_name: Optional[str] = None,
run_number: Optional[int] = None,
run_id: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
run_status: Optional[Union[str, "RunStatus"]] = None,
compute_target: Optional[str] = None,
compute_type: Optional[str] = None,
run_type: Optional[str] = None,
step_type: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
is_reused: Optional[bool] = None,
display_name: Optional[str] = None,
**kwargs
):
"""
:keyword step_name:
:paramtype step_name: str
:keyword run_number:
:paramtype run_number: int
:keyword run_id:
:paramtype run_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword run_status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype run_status: str or ~flow.models.RunStatus
:keyword compute_target:
:paramtype compute_target: str
:keyword compute_type:
:paramtype compute_type: str
:keyword run_type:
:paramtype run_type: str
:keyword step_type:
:paramtype step_type: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword is_reused:
:paramtype is_reused: bool
:keyword display_name:
:paramtype display_name: str
"""
super(PipelineStepRun, self).__init__(**kwargs)
self.step_name = step_name
self.run_number = run_number
self.run_id = run_id
self.start_time = start_time
self.end_time = end_time
self.run_status = run_status
self.compute_target = compute_target
self.compute_type = compute_type
self.run_type = run_type
self.step_type = step_type
self.tags = tags
self.is_reused = is_reused
self.display_name = display_name
class PipelineStepRunOutputs(msrest.serialization.Model):
"""PipelineStepRunOutputs.
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, str]
:ivar port_outputs: This is a dictionary.
:vartype port_outputs: dict[str, ~flow.models.PortOutputInfo]
"""
_attribute_map = {
'outputs': {'key': 'outputs', 'type': '{str}'},
'port_outputs': {'key': 'portOutputs', 'type': '{PortOutputInfo}'},
}
def __init__(
self,
*,
outputs: Optional[Dict[str, str]] = None,
port_outputs: Optional[Dict[str, "PortOutputInfo"]] = None,
**kwargs
):
"""
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, str]
:keyword port_outputs: This is a dictionary.
:paramtype port_outputs: dict[str, ~flow.models.PortOutputInfo]
"""
super(PipelineStepRunOutputs, self).__init__(**kwargs)
self.outputs = outputs
self.port_outputs = port_outputs
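
# Example (illustrative; values are made up): ``port_outputs`` maps port names
# to PortOutputInfo objects (defined later in this module), while ``outputs``
# typically carries a flat string form of the same information.
#
#     step_outputs = PipelineStepRunOutputs(
#         outputs={"processed_data": "azureml/run-1/processed_data"},
#         port_outputs={
#             "processed_data": PortOutputInfo(
#                 data_store_name="workspaceblobstore",
#                 relative_path="azureml/run-1/processed_data",
#                 is_file=False,
#             )
#         },
#     )
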
class PipelineSubDraft(msrest.serialization.Model):
"""PipelineSubDraft.
:ivar parent_graph_draft_id:
:vartype parent_graph_draft_id: str
:ivar parent_node_id:
:vartype parent_node_id: str
:ivar graph_detail:
:vartype graph_detail: ~flow.models.PipelineRunGraphDetail
:ivar module_dto:
:vartype module_dto: ~flow.models.ModuleDto
:ivar name:
:vartype name: str
:ivar last_edited_by:
:vartype last_edited_by: str
:ivar created_by:
:vartype created_by: str
:ivar description:
:vartype description: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'parent_graph_draft_id': {'key': 'parentGraphDraftId', 'type': 'str'},
'parent_node_id': {'key': 'parentNodeId', 'type': 'str'},
'graph_detail': {'key': 'graphDetail', 'type': 'PipelineRunGraphDetail'},
'module_dto': {'key': 'moduleDto', 'type': 'ModuleDto'},
'name': {'key': 'name', 'type': 'str'},
'last_edited_by': {'key': 'lastEditedBy', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
parent_graph_draft_id: Optional[str] = None,
parent_node_id: Optional[str] = None,
graph_detail: Optional["PipelineRunGraphDetail"] = None,
module_dto: Optional["ModuleDto"] = None,
name: Optional[str] = None,
last_edited_by: Optional[str] = None,
created_by: Optional[str] = None,
description: Optional[str] = None,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword parent_graph_draft_id:
:paramtype parent_graph_draft_id: str
:keyword parent_node_id:
:paramtype parent_node_id: str
:keyword graph_detail:
:paramtype graph_detail: ~flow.models.PipelineRunGraphDetail
:keyword module_dto:
:paramtype module_dto: ~flow.models.ModuleDto
:keyword name:
:paramtype name: str
:keyword last_edited_by:
:paramtype last_edited_by: str
:keyword created_by:
:paramtype created_by: str
:keyword description:
:paramtype description: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PipelineSubDraft, self).__init__(**kwargs)
self.parent_graph_draft_id = parent_graph_draft_id
self.parent_node_id = parent_node_id
self.graph_detail = graph_detail
self.module_dto = module_dto
self.name = name
self.last_edited_by = last_edited_by
self.created_by = created_by
self.description = description
self.pipeline_type = pipeline_type
self.pipeline_draft_mode = pipeline_draft_mode
self.tags = tags
self.properties = properties
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
class PolicyValidationResponse(msrest.serialization.Model):
"""PolicyValidationResponse.
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:vartype action_type: str or ~flow.models.ActionType
"""
_attribute_map = {
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
}
def __init__(
self,
*,
error_response: Optional["ErrorResponse"] = None,
next_action_interval_in_seconds: Optional[int] = None,
action_type: Optional[Union[str, "ActionType"]] = None,
**kwargs
):
"""
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:paramtype action_type: str or ~flow.models.ActionType
"""
super(PolicyValidationResponse, self).__init__(**kwargs)
self.error_response = error_response
self.next_action_interval_in_seconds = next_action_interval_in_seconds
self.action_type = action_type
class PortInfo(msrest.serialization.Model):
"""PortInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar graph_port_name:
:vartype graph_port_name: str
:ivar is_parameter:
:vartype is_parameter: bool
:ivar web_service_port:
:vartype web_service_port: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'graph_port_name': {'key': 'graphPortName', 'type': 'str'},
'is_parameter': {'key': 'isParameter', 'type': 'bool'},
'web_service_port': {'key': 'webServicePort', 'type': 'str'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
port_name: Optional[str] = None,
graph_port_name: Optional[str] = None,
is_parameter: Optional[bool] = None,
web_service_port: Optional[str] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword graph_port_name:
:paramtype graph_port_name: str
:keyword is_parameter:
:paramtype is_parameter: bool
:keyword web_service_port:
:paramtype web_service_port: str
"""
super(PortInfo, self).__init__(**kwargs)
self.node_id = node_id
self.port_name = port_name
self.graph_port_name = graph_port_name
self.is_parameter = is_parameter
self.web_service_port = web_service_port
class PortOutputInfo(msrest.serialization.Model):
"""PortOutputInfo.
:ivar container_uri:
:vartype container_uri: str
:ivar relative_path:
:vartype relative_path: str
:ivar preview_params:
:vartype preview_params: str
:ivar model_output_path:
:vartype model_output_path: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:vartype data_reference_type: str or ~flow.models.DataReferenceType
:ivar is_file:
:vartype is_file: bool
:ivar supported_actions:
:vartype supported_actions: list[str or ~flow.models.PortAction]
"""
_attribute_map = {
'container_uri': {'key': 'containerUri', 'type': 'str'},
'relative_path': {'key': 'relativePath', 'type': 'str'},
'preview_params': {'key': 'previewParams', 'type': 'str'},
'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_reference_type': {'key': 'dataReferenceType', 'type': 'str'},
'is_file': {'key': 'isFile', 'type': 'bool'},
'supported_actions': {'key': 'supportedActions', 'type': '[str]'},
}
def __init__(
self,
*,
container_uri: Optional[str] = None,
relative_path: Optional[str] = None,
preview_params: Optional[str] = None,
model_output_path: Optional[str] = None,
data_store_name: Optional[str] = None,
data_reference_type: Optional[Union[str, "DataReferenceType"]] = None,
is_file: Optional[bool] = None,
supported_actions: Optional[List[Union[str, "PortAction"]]] = None,
**kwargs
):
"""
:keyword container_uri:
:paramtype container_uri: str
:keyword relative_path:
:paramtype relative_path: str
:keyword preview_params:
:paramtype preview_params: str
:keyword model_output_path:
:paramtype model_output_path: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_reference_type: Possible values include: "None", "AzureBlob", "AzureDataLake",
"AzureFiles", "AzureSqlDatabase", "AzurePostgresDatabase", "AzureDataLakeGen2", "DBFS",
"AzureMySqlDatabase", "Custom", "Hdfs".
:paramtype data_reference_type: str or ~flow.models.DataReferenceType
:keyword is_file:
:paramtype is_file: bool
:keyword supported_actions:
:paramtype supported_actions: list[str or ~flow.models.PortAction]
"""
super(PortOutputInfo, self).__init__(**kwargs)
self.container_uri = container_uri
self.relative_path = relative_path
self.preview_params = preview_params
self.model_output_path = model_output_path
self.data_store_name = data_store_name
self.data_reference_type = data_reference_type
self.is_file = is_file
self.supported_actions = supported_actions
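
# Example (illustrative): ``supported_actions`` accepts strings or PortAction
# values, and ``data_reference_type`` takes a string from the documented set.
# "Preview" is an assumed PortAction value used only for illustration.
#
#     port_output = PortOutputInfo(
#         container_uri="https://account.blob.core.windows.net/azureml",
#         relative_path="azureml/run-1/output_dir",
#         data_store_name="workspaceblobstore",
#         data_reference_type="AzureBlob",
#         is_file=False,
#         supported_actions=["Preview"],  # assumed enum value
#     )
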
class PriorityConfig(msrest.serialization.Model):
"""PriorityConfig.
:ivar job_priority:
:vartype job_priority: int
:ivar is_preemptible:
:vartype is_preemptible: bool
:ivar node_count_set:
:vartype node_count_set: list[int]
:ivar scale_interval:
:vartype scale_interval: int
"""
_attribute_map = {
'job_priority': {'key': 'jobPriority', 'type': 'int'},
'is_preemptible': {'key': 'isPreemptible', 'type': 'bool'},
'node_count_set': {'key': 'nodeCountSet', 'type': '[int]'},
'scale_interval': {'key': 'scaleInterval', 'type': 'int'},
}
def __init__(
self,
*,
job_priority: Optional[int] = None,
is_preemptible: Optional[bool] = None,
node_count_set: Optional[List[int]] = None,
scale_interval: Optional[int] = None,
**kwargs
):
"""
:keyword job_priority:
:paramtype job_priority: int
:keyword is_preemptible:
:paramtype is_preemptible: bool
:keyword node_count_set:
:paramtype node_count_set: list[int]
:keyword scale_interval:
:paramtype scale_interval: int
"""
super(PriorityConfig, self).__init__(**kwargs)
self.job_priority = job_priority
self.is_preemptible = is_preemptible
self.node_count_set = node_count_set
self.scale_interval = scale_interval
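
# Example (illustrative; the scale-interval unit is an assumption): a
# preemptible job with priority 100 that may scale across the given node
# counts.
#
#     priority = PriorityConfig(
#         job_priority=100,
#         is_preemptible=True,
#         node_count_set=[1, 2, 4],
#         scale_interval=60,  # assumed to be seconds
#     )
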
class PriorityConfiguration(msrest.serialization.Model):
"""PriorityConfiguration.
:ivar cloud_priority:
:vartype cloud_priority: int
:ivar string_type_priority:
:vartype string_type_priority: str
"""
_attribute_map = {
'cloud_priority': {'key': 'cloudPriority', 'type': 'int'},
'string_type_priority': {'key': 'stringTypePriority', 'type': 'str'},
}
def __init__(
self,
*,
cloud_priority: Optional[int] = None,
string_type_priority: Optional[str] = None,
**kwargs
):
"""
:keyword cloud_priority:
:paramtype cloud_priority: int
:keyword string_type_priority:
:paramtype string_type_priority: str
"""
super(PriorityConfiguration, self).__init__(**kwargs)
self.cloud_priority = cloud_priority
self.string_type_priority = string_type_priority
class PromoteDataSetRequest(msrest.serialization.Model):
"""PromoteDataSetRequest.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar module_node_id:
:vartype module_node_id: str
:ivar step_run_id:
:vartype step_run_id: str
:ivar output_port_name:
:vartype output_port_name: str
:ivar model_output_path:
:vartype model_output_path: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar dataset_type:
:vartype dataset_type: str
:ivar data_store_name:
:vartype data_store_name: str
:ivar output_relative_path:
:vartype output_relative_path: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar root_pipeline_run_id:
:vartype root_pipeline_run_id: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar experiment_id:
:vartype experiment_id: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'step_run_id': {'key': 'stepRunId', 'type': 'str'},
'output_port_name': {'key': 'outputPortName', 'type': 'str'},
'model_output_path': {'key': 'modelOutputPath', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'dataset_type': {'key': 'datasetType', 'type': 'str'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'output_relative_path': {'key': 'outputRelativePath', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'root_pipeline_run_id': {'key': 'rootPipelineRunId', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
module_node_id: Optional[str] = None,
step_run_id: Optional[str] = None,
output_port_name: Optional[str] = None,
model_output_path: Optional[str] = None,
data_type_id: Optional[str] = None,
dataset_type: Optional[str] = None,
data_store_name: Optional[str] = None,
output_relative_path: Optional[str] = None,
pipeline_run_id: Optional[str] = None,
root_pipeline_run_id: Optional[str] = None,
experiment_name: Optional[str] = None,
experiment_id: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword module_node_id:
:paramtype module_node_id: str
:keyword step_run_id:
:paramtype step_run_id: str
:keyword output_port_name:
:paramtype output_port_name: str
:keyword model_output_path:
:paramtype model_output_path: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword dataset_type:
:paramtype dataset_type: str
:keyword data_store_name:
:paramtype data_store_name: str
:keyword output_relative_path:
:paramtype output_relative_path: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword root_pipeline_run_id:
:paramtype root_pipeline_run_id: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword experiment_id:
:paramtype experiment_id: str
"""
super(PromoteDataSetRequest, self).__init__(**kwargs)
self.name = name
self.description = description
self.module_node_id = module_node_id
self.step_run_id = step_run_id
self.output_port_name = output_port_name
self.model_output_path = model_output_path
self.data_type_id = data_type_id
self.dataset_type = dataset_type
self.data_store_name = data_store_name
self.output_relative_path = output_relative_path
self.pipeline_run_id = pipeline_run_id
self.root_pipeline_run_id = root_pipeline_run_id
self.experiment_name = experiment_name
self.experiment_id = experiment_id
class ProviderEntity(msrest.serialization.Model):
"""ProviderEntity.
:ivar provider:
:vartype provider: str
:ivar module:
:vartype module: str
:ivar connection_type:
:vartype connection_type: list[str or ~flow.models.ConnectionType]
:ivar apis:
:vartype apis: list[~flow.models.ApiAndParameters]
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'module': {'key': 'module', 'type': 'str'},
'connection_type': {'key': 'connection_type', 'type': '[str]'},
'apis': {'key': 'apis', 'type': '[ApiAndParameters]'},
}
def __init__(
self,
*,
provider: Optional[str] = None,
module: Optional[str] = None,
connection_type: Optional[List[Union[str, "ConnectionType"]]] = None,
apis: Optional[List["ApiAndParameters"]] = None,
**kwargs
):
"""
:keyword provider:
:paramtype provider: str
:keyword module:
:paramtype module: str
:keyword connection_type:
:paramtype connection_type: list[str or ~flow.models.ConnectionType]
:keyword apis:
:paramtype apis: list[~flow.models.ApiAndParameters]
"""
super(ProviderEntity, self).__init__(**kwargs)
self.provider = provider
self.module = module
self.connection_type = connection_type
self.apis = apis
class PublishedPipeline(msrest.serialization.Model):
"""PublishedPipeline.
:ivar total_run_steps:
:vartype total_run_steps: int
:ivar total_runs:
:vartype total_runs: int
:ivar parameters: This is a dictionary.
:vartype parameters: dict[str, str]
:ivar data_set_definition_value_assignment: This is a dictionary.
:vartype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar rest_endpoint:
:vartype rest_endpoint: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar graph_id:
:vartype graph_id: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar published_by:
:vartype published_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar version:
:vartype version: str
:ivar is_default:
:vartype is_default: bool
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'total_run_steps': {'key': 'totalRunSteps', 'type': 'int'},
'total_runs': {'key': 'totalRuns', 'type': 'int'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': '{DataSetDefinitionValue}'},
'rest_endpoint': {'key': 'restEndpoint', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
total_run_steps: Optional[int] = None,
total_runs: Optional[int] = None,
parameters: Optional[Dict[str, str]] = None,
data_set_definition_value_assignment: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
rest_endpoint: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
graph_id: Optional[str] = None,
published_date: Optional[datetime.datetime] = None,
last_run_time: Optional[datetime.datetime] = None,
last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
published_by: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
version: Optional[str] = None,
is_default: Optional[bool] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword total_run_steps:
:paramtype total_run_steps: int
:keyword total_runs:
:paramtype total_runs: int
:keyword parameters: This is a dictionary.
:paramtype parameters: dict[str, str]
:keyword data_set_definition_value_assignment: This is a dictionary.
:paramtype data_set_definition_value_assignment: dict[str, ~flow.models.DataSetDefinitionValue]
:keyword rest_endpoint:
:paramtype rest_endpoint: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword graph_id:
:paramtype graph_id: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword published_by:
:paramtype published_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword version:
:paramtype version: str
:keyword is_default:
:paramtype is_default: bool
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PublishedPipeline, self).__init__(**kwargs)
self.total_run_steps = total_run_steps
self.total_runs = total_runs
self.parameters = parameters
self.data_set_definition_value_assignment = data_set_definition_value_assignment
self.rest_endpoint = rest_endpoint
self.name = name
self.description = description
self.graph_id = graph_id
self.published_date = published_date
self.last_run_time = last_run_time
self.last_run_status = last_run_status
self.published_by = published_by
self.tags = tags
self.properties = properties
self.version = version
self.is_default = is_default
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
class PublishedPipelineSummary(msrest.serialization.Model):
"""PublishedPipelineSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar graph_id:
:vartype graph_id: str
:ivar published_date:
:vartype published_date: ~datetime.datetime
:ivar last_run_time:
:vartype last_run_time: ~datetime.datetime
:ivar last_run_status: Possible values include: "NotStarted", "Running", "Failed", "Finished",
"Canceled", "Queued", "CancelRequested".
:vartype last_run_status: str or ~flow.models.PipelineRunStatusCode
:ivar published_by:
:vartype published_by: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar version:
:vartype version: str
:ivar is_default:
:vartype is_default: bool
:ivar entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:vartype entity_status: str or ~flow.models.EntityStatus
:ivar id:
:vartype id: str
:ivar etag:
:vartype etag: str
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'graph_id': {'key': 'graphId', 'type': 'str'},
'published_date': {'key': 'publishedDate', 'type': 'iso-8601'},
'last_run_time': {'key': 'lastRunTime', 'type': 'iso-8601'},
'last_run_status': {'key': 'lastRunStatus', 'type': 'str'},
'published_by': {'key': 'publishedBy', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'version': {'key': 'version', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'entity_status': {'key': 'entityStatus', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
graph_id: Optional[str] = None,
published_date: Optional[datetime.datetime] = None,
last_run_time: Optional[datetime.datetime] = None,
last_run_status: Optional[Union[str, "PipelineRunStatusCode"]] = None,
published_by: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
version: Optional[str] = None,
is_default: Optional[bool] = None,
entity_status: Optional[Union[str, "EntityStatus"]] = None,
id: Optional[str] = None,
etag: Optional[str] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword graph_id:
:paramtype graph_id: str
:keyword published_date:
:paramtype published_date: ~datetime.datetime
:keyword last_run_time:
:paramtype last_run_time: ~datetime.datetime
:keyword last_run_status: Possible values include: "NotStarted", "Running", "Failed",
"Finished", "Canceled", "Queued", "CancelRequested".
:paramtype last_run_status: str or ~flow.models.PipelineRunStatusCode
:keyword published_by:
:paramtype published_by: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword version:
:paramtype version: str
:keyword is_default:
:paramtype is_default: bool
:keyword entity_status: Possible values include: "Active", "Deprecated", "Disabled".
:paramtype entity_status: str or ~flow.models.EntityStatus
:keyword id:
:paramtype id: str
:keyword etag:
:paramtype etag: str
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
"""
super(PublishedPipelineSummary, self).__init__(**kwargs)
self.name = name
self.description = description
self.graph_id = graph_id
self.published_date = published_date
self.last_run_time = last_run_time
self.last_run_status = last_run_status
self.published_by = published_by
self.tags = tags
self.properties = properties
self.version = version
self.is_default = is_default
self.entity_status = entity_status
self.id = id
self.etag = etag
self.created_date = created_date
self.last_modified_date = last_modified_date
class PythonInterfaceMapping(msrest.serialization.Model):
"""PythonInterfaceMapping.
:ivar name:
:vartype name: str
:ivar name_in_yaml:
:vartype name_in_yaml: str
:ivar argument_name:
:vartype argument_name: str
:ivar command_line_option:
:vartype command_line_option: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'name_in_yaml': {'key': 'nameInYaml', 'type': 'str'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'command_line_option': {'key': 'commandLineOption', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
name_in_yaml: Optional[str] = None,
argument_name: Optional[str] = None,
command_line_option: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword name_in_yaml:
:paramtype name_in_yaml: str
:keyword argument_name:
:paramtype argument_name: str
:keyword command_line_option:
:paramtype command_line_option: str
"""
super(PythonInterfaceMapping, self).__init__(**kwargs)
self.name = name
self.name_in_yaml = name_in_yaml
self.argument_name = argument_name
self.command_line_option = command_line_option
class PythonPyPiOrRCranLibraryDto(msrest.serialization.Model):
"""PythonPyPiOrRCranLibraryDto.
:ivar package:
:vartype package: str
:ivar repo:
:vartype repo: str
"""
_attribute_map = {
'package': {'key': 'package', 'type': 'str'},
'repo': {'key': 'repo', 'type': 'str'},
}
def __init__(
self,
*,
package: Optional[str] = None,
repo: Optional[str] = None,
**kwargs
):
"""
:keyword package:
:paramtype package: str
:keyword repo:
:paramtype repo: str
"""
super(PythonPyPiOrRCranLibraryDto, self).__init__(**kwargs)
self.package = package
self.repo = repo
class PythonSection(msrest.serialization.Model):
"""PythonSection.
:ivar interpreter_path:
:vartype interpreter_path: str
:ivar user_managed_dependencies:
:vartype user_managed_dependencies: bool
:ivar conda_dependencies: Anything.
:vartype conda_dependencies: any
:ivar base_conda_environment:
:vartype base_conda_environment: str
"""
_attribute_map = {
'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
}
def __init__(
self,
*,
interpreter_path: Optional[str] = None,
user_managed_dependencies: Optional[bool] = None,
conda_dependencies: Optional[Any] = None,
base_conda_environment: Optional[str] = None,
**kwargs
):
"""
:keyword interpreter_path:
:paramtype interpreter_path: str
:keyword user_managed_dependencies:
:paramtype user_managed_dependencies: bool
:keyword conda_dependencies: Anything.
:paramtype conda_dependencies: any
:keyword base_conda_environment:
:paramtype base_conda_environment: str
"""
super(PythonSection, self).__init__(**kwargs)
self.interpreter_path = interpreter_path
self.user_managed_dependencies = user_managed_dependencies
self.conda_dependencies = conda_dependencies
self.base_conda_environment = base_conda_environment
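
# Example (illustrative): ``conda_dependencies`` is typed ``object`` in the
# attribute map, so any JSON-serializable value is accepted; a conda
# environment dict is the typical payload (shape assumed below).
#
#     python_section = PythonSection(
#         interpreter_path="python",
#         user_managed_dependencies=False,
#         conda_dependencies={
#             "name": "project_environment",
#             "dependencies": ["python=3.8", {"pip": ["azureml-defaults"]}],
#         },
#     )
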
class PyTorchConfiguration(msrest.serialization.Model):
"""PyTorchConfiguration.
:ivar communication_backend:
:vartype communication_backend: str
:ivar process_count:
:vartype process_count: int
"""
_attribute_map = {
'communication_backend': {'key': 'communicationBackend', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
*,
communication_backend: Optional[str] = None,
process_count: Optional[int] = None,
**kwargs
):
"""
:keyword communication_backend:
:paramtype communication_backend: str
:keyword process_count:
:paramtype process_count: int
"""
super(PyTorchConfiguration, self).__init__(**kwargs)
self.communication_backend = communication_backend
self.process_count = process_count
class QueueingInfo(msrest.serialization.Model):
"""QueueingInfo.
:ivar code:
:vartype code: str
:ivar message:
:vartype message: str
:ivar last_refresh_timestamp:
:vartype last_refresh_timestamp: ~datetime.datetime
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
last_refresh_timestamp: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword code:
:paramtype code: str
:keyword message:
:paramtype message: str
:keyword last_refresh_timestamp:
:paramtype last_refresh_timestamp: ~datetime.datetime
"""
super(QueueingInfo, self).__init__(**kwargs)
self.code = code
self.message = message
self.last_refresh_timestamp = last_refresh_timestamp
class RawComponentDto(msrest.serialization.Model):
"""RawComponentDto.
:ivar component_schema:
:vartype component_schema: str
:ivar is_anonymous:
:vartype is_anonymous: bool
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar type: Possible values include: "Unknown", "CommandComponent", "Command".
:vartype type: str or ~flow.models.ComponentType
:ivar component_type_version:
:vartype component_type_version: str
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar is_deterministic:
:vartype is_deterministic: bool
:ivar successful_return_code:
:vartype successful_return_code: str
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.ComponentInput]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.ComponentOutput]
:ivar command:
:vartype command: str
:ivar environment_name:
:vartype environment_name: str
:ivar environment_version:
:vartype environment_version: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_date:
:vartype created_date: ~datetime.datetime
:ivar last_modified_date:
:vartype last_modified_date: ~datetime.datetime
:ivar component_internal_id:
:vartype component_internal_id: str
"""
_attribute_map = {
'component_schema': {'key': 'componentSchema', 'type': 'str'},
'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'component_type_version': {'key': 'componentTypeVersion', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'is_deterministic': {'key': 'isDeterministic', 'type': 'bool'},
'successful_return_code': {'key': 'successfulReturnCode', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{ComponentInput}'},
'outputs': {'key': 'outputs', 'type': '{ComponentOutput}'},
'command': {'key': 'command', 'type': 'str'},
'environment_name': {'key': 'environmentName', 'type': 'str'},
'environment_version': {'key': 'environmentVersion', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'SchemaContractsCreatedBy'},
'created_date': {'key': 'createdDate', 'type': 'iso-8601'},
'last_modified_date': {'key': 'lastModifiedDate', 'type': 'iso-8601'},
'component_internal_id': {'key': 'componentInternalId', 'type': 'str'},
}
def __init__(
self,
*,
component_schema: Optional[str] = None,
is_anonymous: Optional[bool] = None,
name: Optional[str] = None,
version: Optional[str] = None,
type: Optional[Union[str, "ComponentType"]] = None,
component_type_version: Optional[str] = None,
display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
is_deterministic: Optional[bool] = None,
successful_return_code: Optional[str] = None,
inputs: Optional[Dict[str, "ComponentInput"]] = None,
outputs: Optional[Dict[str, "ComponentOutput"]] = None,
command: Optional[str] = None,
environment_name: Optional[str] = None,
environment_version: Optional[str] = None,
snapshot_id: Optional[str] = None,
created_by: Optional["SchemaContractsCreatedBy"] = None,
last_modified_by: Optional["SchemaContractsCreatedBy"] = None,
created_date: Optional[datetime.datetime] = None,
last_modified_date: Optional[datetime.datetime] = None,
component_internal_id: Optional[str] = None,
**kwargs
):
"""
:keyword component_schema:
:paramtype component_schema: str
:keyword is_anonymous:
:paramtype is_anonymous: bool
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword type: Possible values include: "Unknown", "CommandComponent", "Command".
:paramtype type: str or ~flow.models.ComponentType
:keyword component_type_version:
:paramtype component_type_version: str
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword is_deterministic:
:paramtype is_deterministic: bool
:keyword successful_return_code:
:paramtype successful_return_code: str
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.ComponentInput]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.ComponentOutput]
:keyword command:
:paramtype command: str
:keyword environment_name:
:paramtype environment_name: str
:keyword environment_version:
:paramtype environment_version: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_date:
:paramtype created_date: ~datetime.datetime
:keyword last_modified_date:
:paramtype last_modified_date: ~datetime.datetime
:keyword component_internal_id:
:paramtype component_internal_id: str
"""
super(RawComponentDto, self).__init__(**kwargs)
self.component_schema = component_schema
self.is_anonymous = is_anonymous
self.name = name
self.version = version
self.type = type
self.component_type_version = component_type_version
self.display_name = display_name
self.description = description
self.tags = tags
self.properties = properties
self.is_deterministic = is_deterministic
self.successful_return_code = successful_return_code
self.inputs = inputs
self.outputs = outputs
self.command = command
self.environment_name = environment_name
self.environment_version = environment_version
self.snapshot_id = snapshot_id
self.created_by = created_by
self.last_modified_by = last_modified_by
self.created_date = created_date
self.last_modified_date = last_modified_date
self.component_internal_id = component_internal_id
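# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Example of building a RawComponentDto from a plain dict; the payload below
# is an assumption. Model.from_dict accepts REST-shaped (camelCase) keys, and
# because 'inputs'/'outputs' are typed '{ComponentInput}'/'{ComponentOutput}'
# in the attribute map, msrest deserializes each dict value into the
# corresponding model class.
#
#   dto = RawComponentDto.from_dict({
#       "name": "my_component",
#       "version": "1",
#       "isDeterministic": True,
#       "tags": {"team": "flow"},
#   })
#   dto.as_dict()  # round-trips back to REST-shaped keys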
class RayConfiguration(msrest.serialization.Model):
"""RayConfiguration.
:ivar port:
:vartype port: int
:ivar address:
:vartype address: str
:ivar include_dashboard:
:vartype include_dashboard: bool
:ivar dashboard_port:
:vartype dashboard_port: int
:ivar head_node_additional_args:
:vartype head_node_additional_args: str
:ivar worker_node_additional_args:
:vartype worker_node_additional_args: str
"""
_attribute_map = {
'port': {'key': 'port', 'type': 'int'},
'address': {'key': 'address', 'type': 'str'},
'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'},
'dashboard_port': {'key': 'dashboardPort', 'type': 'int'},
'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'},
'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'},
}
def __init__(
self,
*,
port: Optional[int] = None,
address: Optional[str] = None,
include_dashboard: Optional[bool] = None,
dashboard_port: Optional[int] = None,
head_node_additional_args: Optional[str] = None,
worker_node_additional_args: Optional[str] = None,
**kwargs
):
"""
:keyword port:
:paramtype port: int
:keyword address:
:paramtype address: str
:keyword include_dashboard:
:paramtype include_dashboard: bool
:keyword dashboard_port:
:paramtype dashboard_port: int
:keyword head_node_additional_args:
:paramtype head_node_additional_args: str
:keyword worker_node_additional_args:
:paramtype worker_node_additional_args: str
"""
super(RayConfiguration, self).__init__(**kwargs)
self.port = port
self.address = address
self.include_dashboard = include_dashboard
self.dashboard_port = dashboard_port
self.head_node_additional_args = head_node_additional_args
self.worker_node_additional_args = worker_node_additional_args
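# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Example showing the snake_case -> camelCase mapping declared in
# _attribute_map; the port numbers are assumptions.
#
#   ray = RayConfiguration(port=6379, include_dashboard=True, dashboard_port=8265)
#   ray.serialize()
#   # -> {'port': 6379, 'includeDashboard': True, 'dashboardPort': 8265}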
class RCranPackage(msrest.serialization.Model):
"""RCranPackage.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar repository:
:vartype repository: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'repository': {'key': 'repository', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
version: Optional[str] = None,
repository: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword repository:
:paramtype repository: str
"""
super(RCranPackage, self).__init__(**kwargs)
self.name = name
self.version = version
self.repository = repository
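# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Example of strict deserialization from a REST payload via the
# Model.deserialize classmethod; the package shown is an assumption.
#
#   pkg = RCranPackage.deserialize(
#       {"name": "dplyr", "version": "1.1.4", "repository": "https://cran.r-project.org"}
#   )
#   assert pkg.name == "dplyr"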
class RealTimeEndpoint(msrest.serialization.Model):
"""RealTimeEndpoint.
:ivar created_by:
:vartype created_by: str
:ivar kv_tags: Dictionary of :code:`<string>`.
:vartype kv_tags: dict[str, str]
:ivar state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
"Unschedulable".
:vartype state: str or ~flow.models.WebServiceState
:ivar error:
:vartype error: ~flow.models.ModelManagementErrorResponse
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar image_id:
:vartype image_id: str
:ivar cpu:
:vartype cpu: float
:ivar memory_in_gb:
:vartype memory_in_gb: float
:ivar max_concurrent_requests_per_container:
:vartype max_concurrent_requests_per_container: int
:ivar num_replicas:
:vartype num_replicas: int
:ivar event_hub_enabled:
:vartype event_hub_enabled: bool
:ivar storage_enabled:
:vartype storage_enabled: bool
:ivar app_insights_enabled:
:vartype app_insights_enabled: bool
:ivar auto_scale_enabled:
:vartype auto_scale_enabled: bool
:ivar min_replicas:
:vartype min_replicas: int
:ivar max_replicas:
:vartype max_replicas: int
:ivar target_utilization:
:vartype target_utilization: int
:ivar refresh_period_in_seconds:
:vartype refresh_period_in_seconds: int
:ivar scoring_uri:
:vartype scoring_uri: str
:ivar deployment_status:
:vartype deployment_status: ~flow.models.AKSReplicaStatus
:ivar scoring_timeout_ms:
:vartype scoring_timeout_ms: int
:ivar auth_enabled:
:vartype auth_enabled: bool
:ivar aad_auth_enabled:
:vartype aad_auth_enabled: bool
:ivar region:
:vartype region: str
:ivar primary_key:
:vartype primary_key: str
:ivar secondary_key:
:vartype secondary_key: str
:ivar swagger_uri:
:vartype swagger_uri: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
:ivar linked_pipeline_run_id:
:vartype linked_pipeline_run_id: str
:ivar warning:
:vartype warning: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar id:
:vartype id: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar updated_time:
:vartype updated_time: ~datetime.datetime
:ivar compute_name:
:vartype compute_name: str
:ivar updated_by:
:vartype updated_by: str
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'kv_tags': {'key': 'kvTags', 'type': '{str}'},
'state': {'key': 'state', 'type': 'str'},
'error': {'key': 'error', 'type': 'ModelManagementErrorResponse'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'image_id': {'key': 'imageId', 'type': 'str'},
'cpu': {'key': 'cpu', 'type': 'float'},
'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
'num_replicas': {'key': 'numReplicas', 'type': 'int'},
'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
'auto_scale_enabled': {'key': 'autoScaleEnabled', 'type': 'bool'},
'min_replicas': {'key': 'minReplicas', 'type': 'int'},
'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'AKSReplicaStatus'},
'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
'region': {'key': 'region', 'type': 'str'},
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
'linked_pipeline_run_id': {'key': 'linkedPipelineRunId', 'type': 'str'},
'warning': {'key': 'warning', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
}
def __init__(
self,
*,
created_by: Optional[str] = None,
kv_tags: Optional[Dict[str, str]] = None,
state: Optional[Union[str, "WebServiceState"]] = None,
error: Optional["ModelManagementErrorResponse"] = None,
compute_type: Optional[Union[str, "ComputeEnvironmentType"]] = None,
image_id: Optional[str] = None,
cpu: Optional[float] = None,
memory_in_gb: Optional[float] = None,
max_concurrent_requests_per_container: Optional[int] = None,
num_replicas: Optional[int] = None,
event_hub_enabled: Optional[bool] = None,
storage_enabled: Optional[bool] = None,
app_insights_enabled: Optional[bool] = None,
auto_scale_enabled: Optional[bool] = None,
min_replicas: Optional[int] = None,
max_replicas: Optional[int] = None,
target_utilization: Optional[int] = None,
refresh_period_in_seconds: Optional[int] = None,
scoring_uri: Optional[str] = None,
deployment_status: Optional["AKSReplicaStatus"] = None,
scoring_timeout_ms: Optional[int] = None,
auth_enabled: Optional[bool] = None,
aad_auth_enabled: Optional[bool] = None,
region: Optional[str] = None,
primary_key: Optional[str] = None,
secondary_key: Optional[str] = None,
swagger_uri: Optional[str] = None,
linked_pipeline_draft_id: Optional[str] = None,
linked_pipeline_run_id: Optional[str] = None,
warning: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
id: Optional[str] = None,
created_time: Optional[datetime.datetime] = None,
updated_time: Optional[datetime.datetime] = None,
compute_name: Optional[str] = None,
updated_by: Optional[str] = None,
**kwargs
):
"""
:keyword created_by:
:paramtype created_by: str
:keyword kv_tags: Dictionary of :code:`<string>`.
:paramtype kv_tags: dict[str, str]
:keyword state: Possible values include: "Transitioning", "Healthy", "Unhealthy", "Failed",
"Unschedulable".
:paramtype state: str or ~flow.models.WebServiceState
:keyword error:
:paramtype error: ~flow.models.ModelManagementErrorResponse
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword image_id:
:paramtype image_id: str
:keyword cpu:
:paramtype cpu: float
:keyword memory_in_gb:
:paramtype memory_in_gb: float
:keyword max_concurrent_requests_per_container:
:paramtype max_concurrent_requests_per_container: int
:keyword num_replicas:
:paramtype num_replicas: int
:keyword event_hub_enabled:
:paramtype event_hub_enabled: bool
:keyword storage_enabled:
:paramtype storage_enabled: bool
:keyword app_insights_enabled:
:paramtype app_insights_enabled: bool
:keyword auto_scale_enabled:
:paramtype auto_scale_enabled: bool
:keyword min_replicas:
:paramtype min_replicas: int
:keyword max_replicas:
:paramtype max_replicas: int
:keyword target_utilization:
:paramtype target_utilization: int
:keyword refresh_period_in_seconds:
:paramtype refresh_period_in_seconds: int
:keyword scoring_uri:
:paramtype scoring_uri: str
:keyword deployment_status:
:paramtype deployment_status: ~flow.models.AKSReplicaStatus
:keyword scoring_timeout_ms:
:paramtype scoring_timeout_ms: int
:keyword auth_enabled:
:paramtype auth_enabled: bool
:keyword aad_auth_enabled:
:paramtype aad_auth_enabled: bool
:keyword region:
:paramtype region: str
:keyword primary_key:
:paramtype primary_key: str
:keyword secondary_key:
:paramtype secondary_key: str
:keyword swagger_uri:
:paramtype swagger_uri: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
:keyword linked_pipeline_run_id:
:paramtype linked_pipeline_run_id: str
:keyword warning:
:paramtype warning: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword id:
:paramtype id: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword updated_time:
:paramtype updated_time: ~datetime.datetime
:keyword compute_name:
:paramtype compute_name: str
:keyword updated_by:
:paramtype updated_by: str
"""
super(RealTimeEndpoint, self).__init__(**kwargs)
self.created_by = created_by
self.kv_tags = kv_tags
self.state = state
self.error = error
self.compute_type = compute_type
self.image_id = image_id
self.cpu = cpu
self.memory_in_gb = memory_in_gb
self.max_concurrent_requests_per_container = max_concurrent_requests_per_container
self.num_replicas = num_replicas
self.event_hub_enabled = event_hub_enabled
self.storage_enabled = storage_enabled
self.app_insights_enabled = app_insights_enabled
self.auto_scale_enabled = auto_scale_enabled
self.min_replicas = min_replicas
self.max_replicas = max_replicas
self.target_utilization = target_utilization
self.refresh_period_in_seconds = refresh_period_in_seconds
self.scoring_uri = scoring_uri
self.deployment_status = deployment_status
self.scoring_timeout_ms = scoring_timeout_ms
self.auth_enabled = auth_enabled
self.aad_auth_enabled = aad_auth_enabled
self.region = region
self.primary_key = primary_key
self.secondary_key = secondary_key
self.swagger_uri = swagger_uri
self.linked_pipeline_draft_id = linked_pipeline_draft_id
self.linked_pipeline_run_id = linked_pipeline_run_id
self.warning = warning
self.name = name
self.description = description
self.id = id
self.created_time = created_time
self.updated_time = updated_time
self.compute_name = compute_name
self.updated_by = updated_by
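# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Example of the enum-or-string convention: 'state' and 'compute_type' accept
# either the enum member or its string value, since the attribute map types
# them as plain 'str'. The values below are assumptions. Note that
# 'primary_key' and 'secondary_key' carry credentials, so avoid logging the
# serialized output of this model.
#
#   ep = RealTimeEndpoint(name="my-endpoint", state="Healthy", compute_type="AKS")
#   ep.serialize()["state"]  # -> 'Healthy'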
class RealTimeEndpointInfo(msrest.serialization.Model):
"""RealTimeEndpointInfo.
:ivar web_service_inputs:
:vartype web_service_inputs: list[~flow.models.WebServicePort]
:ivar web_service_outputs:
:vartype web_service_outputs: list[~flow.models.WebServicePort]
:ivar deployments_info:
:vartype deployments_info: list[~flow.models.DeploymentInfo]
"""
_attribute_map = {
'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
'deployments_info': {'key': 'deploymentsInfo', 'type': '[DeploymentInfo]'},
}
def __init__(
self,
*,
web_service_inputs: Optional[List["WebServicePort"]] = None,
web_service_outputs: Optional[List["WebServicePort"]] = None,
deployments_info: Optional[List["DeploymentInfo"]] = None,
**kwargs
):
"""
:keyword web_service_inputs:
:paramtype web_service_inputs: list[~flow.models.WebServicePort]
:keyword web_service_outputs:
:paramtype web_service_outputs: list[~flow.models.WebServicePort]
:keyword deployments_info:
:paramtype deployments_info: list[~flow.models.DeploymentInfo]
"""
super(RealTimeEndpointInfo, self).__init__(**kwargs)
self.web_service_inputs = web_service_inputs
self.web_service_outputs = web_service_outputs
self.deployments_info = deployments_info
class RealTimeEndpointStatus(msrest.serialization.Model):
"""RealTimeEndpointStatus.
:ivar last_operation: Possible values include: "Create", "Update", "Delete".
:vartype last_operation: str or ~flow.models.RealTimeEndpointOpCode
:ivar last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
"SucceededWithWarning".
:vartype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
:ivar internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
"FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
"DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
:vartype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
:ivar status_detail:
:vartype status_detail: str
:ivar deployment_state:
:vartype deployment_state: str
:ivar service_id:
:vartype service_id: str
:ivar linked_pipeline_draft_id:
:vartype linked_pipeline_draft_id: str
"""
_attribute_map = {
'last_operation': {'key': 'lastOperation', 'type': 'str'},
'last_operation_status': {'key': 'lastOperationStatus', 'type': 'str'},
'internal_step': {'key': 'internalStep', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'deployment_state': {'key': 'deploymentState', 'type': 'str'},
'service_id': {'key': 'serviceId', 'type': 'str'},
'linked_pipeline_draft_id': {'key': 'linkedPipelineDraftId', 'type': 'str'},
}
def __init__(
self,
*,
last_operation: Optional[Union[str, "RealTimeEndpointOpCode"]] = None,
last_operation_status: Optional[Union[str, "RealTimeEndpointOpStatusCode"]] = None,
internal_step: Optional[Union[str, "RealTimeEndpointInternalStepCode"]] = None,
status_detail: Optional[str] = None,
deployment_state: Optional[str] = None,
service_id: Optional[str] = None,
linked_pipeline_draft_id: Optional[str] = None,
**kwargs
):
"""
:keyword last_operation: Possible values include: "Create", "Update", "Delete".
:paramtype last_operation: str or ~flow.models.RealTimeEndpointOpCode
:keyword last_operation_status: Possible values include: "Ongoing", "Succeeded", "Failed",
"SucceededWithWarning".
:paramtype last_operation_status: str or ~flow.models.RealTimeEndpointOpStatusCode
:keyword internal_step: Possible values include: "AboutToDeploy", "WaitAksComputeReady",
"RegisterModels", "CreateServiceFromModels", "UpdateServiceFromModels", "WaitServiceCreating",
"FetchServiceRelatedInfo", "TestWithSampleData", "AboutToDelete", "DeleteDeployment",
"DeleteAsset", "DeleteImage", "DeleteModel", "DeleteServiceRecord".
:paramtype internal_step: str or ~flow.models.RealTimeEndpointInternalStepCode
:keyword status_detail:
:paramtype status_detail: str
:keyword deployment_state:
:paramtype deployment_state: str
:keyword service_id:
:paramtype service_id: str
:keyword linked_pipeline_draft_id:
:paramtype linked_pipeline_draft_id: str
"""
super(RealTimeEndpointStatus, self).__init__(**kwargs)
self.last_operation = last_operation
self.last_operation_status = last_operation_status
self.internal_step = internal_step
self.status_detail = status_detail
self.deployment_state = deployment_state
self.service_id = service_id
self.linked_pipeline_draft_id = linked_pipeline_draft_id
class RealTimeEndpointSummary(msrest.serialization.Model):
"""RealTimeEndpointSummary.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar id:
:vartype id: str
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar updated_time:
:vartype updated_time: ~datetime.datetime
:ivar compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT", "AKSENDPOINT",
"MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE", "UNKNOWN".
:vartype compute_type: str or ~flow.models.ComputeEnvironmentType
:ivar compute_name:
:vartype compute_name: str
:ivar updated_by:
:vartype updated_by: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'compute_name': {'key': 'computeName', 'type': 'str'},
'updated_by': {'key': 'updatedBy', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
id: Optional[str] = None,
created_time: Optional[datetime.datetime] = None,
updated_time: Optional[datetime.datetime] = None,
compute_type: Optional[Union[str, "ComputeEnvironmentType"]] = None,
compute_name: Optional[str] = None,
updated_by: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword id:
:paramtype id: str
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword updated_time:
:paramtype updated_time: ~datetime.datetime
:keyword compute_type: Possible values include: "ACI", "AKS", "AMLCOMPUTE", "IOT",
"AKSENDPOINT", "MIRSINGLEMODEL", "MIRAMLCOMPUTE", "MIRGA", "AMLARC", "BATCHAMLCOMPUTE",
"UNKNOWN".
:paramtype compute_type: str or ~flow.models.ComputeEnvironmentType
:keyword compute_name:
:paramtype compute_name: str
:keyword updated_by:
:paramtype updated_by: str
"""
super(RealTimeEndpointSummary, self).__init__(**kwargs)
self.name = name
self.description = description
self.id = id
self.created_time = created_time
self.updated_time = updated_time
self.compute_type = compute_type
self.compute_name = compute_name
self.updated_by = updated_by
class RealTimeEndpointTestRequest(msrest.serialization.Model):
"""RealTimeEndpointTestRequest.
:ivar end_point:
:vartype end_point: str
:ivar auth_key:
:vartype auth_key: str
:ivar payload:
:vartype payload: str
"""
_attribute_map = {
'end_point': {'key': 'endPoint', 'type': 'str'},
'auth_key': {'key': 'authKey', 'type': 'str'},
'payload': {'key': 'payload', 'type': 'str'},
}
def __init__(
self,
*,
end_point: Optional[str] = None,
auth_key: Optional[str] = None,
payload: Optional[str] = None,
**kwargs
):
"""
:keyword end_point:
:paramtype end_point: str
:keyword auth_key:
:paramtype auth_key: str
:keyword payload:
:paramtype payload: str
"""
super(RealTimeEndpointTestRequest, self).__init__(**kwargs)
self.end_point = end_point
self.auth_key = auth_key
self.payload = payload
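# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Example of a test request; note 'payload' is typed 'str', so a JSON body
# must be pre-serialized with json.dumps rather than passed as a dict. The
# endpoint and key below are placeholders, not real values.
#
#   import json
#   req = RealTimeEndpointTestRequest(
#       end_point="https://example.invalid/score",
#       auth_key="<key>",
#       payload=json.dumps({"data": [[1, 2, 3]]}),
#   )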
class Recurrence(msrest.serialization.Model):
"""Recurrence.
:ivar frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
:vartype frequency: str or ~flow.models.Frequency
:ivar interval:
:vartype interval: int
:ivar schedule:
:vartype schedule: ~flow.models.RecurrenceSchedule
:ivar end_time:
:vartype end_time: str
:ivar start_time:
:vartype start_time: str
:ivar time_zone:
:vartype time_zone: str
"""
_attribute_map = {
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
'end_time': {'key': 'endTime', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'str'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
}
def __init__(
self,
*,
frequency: Optional[Union[str, "Frequency"]] = None,
interval: Optional[int] = None,
schedule: Optional["RecurrenceSchedule"] = None,
end_time: Optional[str] = None,
start_time: Optional[str] = None,
time_zone: Optional[str] = None,
**kwargs
):
"""
:keyword frequency: Possible values include: "Month", "Week", "Day", "Hour", "Minute".
:paramtype frequency: str or ~flow.models.Frequency
:keyword interval:
:paramtype interval: int
:keyword schedule:
:paramtype schedule: ~flow.models.RecurrenceSchedule
:keyword end_time:
:paramtype end_time: str
:keyword start_time:
:paramtype start_time: str
:keyword time_zone:
:paramtype time_zone: str
"""
super(Recurrence, self).__init__(**kwargs)
self.frequency = frequency
self.interval = interval
self.schedule = schedule
self.end_time = end_time
self.start_time = start_time
self.time_zone = time_zone
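# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Example composing Recurrence with a nested RecurrenceSchedule (defined
# later in this module); the schedule values are assumptions. Note that
# 'start_time'/'end_time' are plain strings here, not datetimes.
#
#   rec = Recurrence(
#       frequency="Week",
#       interval=1,
#       schedule=RecurrenceSchedule(hours=[9], minutes=[0], week_days=["Monday"]),
#       time_zone="UTC",
#   )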
class RecurrencePattern(msrest.serialization.Model):
"""RecurrencePattern.
:ivar hours:
:vartype hours: list[int]
:ivar minutes:
:vartype minutes: list[int]
:ivar weekdays:
:vartype weekdays: list[str or ~flow.models.Weekday]
"""
_attribute_map = {
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'weekdays': {'key': 'weekdays', 'type': '[str]'},
}
def __init__(
self,
*,
hours: Optional[List[int]] = None,
minutes: Optional[List[int]] = None,
weekdays: Optional[List[Union[str, "Weekday"]]] = None,
**kwargs
):
"""
:keyword hours:
:paramtype hours: list[int]
:keyword minutes:
:paramtype minutes: list[int]
:keyword weekdays:
:paramtype weekdays: list[str or ~flow.models.Weekday]
"""
super(RecurrencePattern, self).__init__(**kwargs)
self.hours = hours
self.minutes = minutes
self.weekdays = weekdays
class RecurrenceSchedule(msrest.serialization.Model):
"""RecurrenceSchedule.
:ivar hours:
:vartype hours: list[int]
:ivar minutes:
:vartype minutes: list[int]
:ivar week_days:
:vartype week_days: list[str or ~flow.models.WeekDays]
:ivar month_days:
:vartype month_days: list[int]
"""
_attribute_map = {
'hours': {'key': 'hours', 'type': '[int]'},
'minutes': {'key': 'minutes', 'type': '[int]'},
'week_days': {'key': 'weekDays', 'type': '[str]'},
'month_days': {'key': 'monthDays', 'type': '[int]'},
}
def __init__(
self,
*,
hours: Optional[List[int]] = None,
minutes: Optional[List[int]] = None,
week_days: Optional[List[Union[str, "WeekDays"]]] = None,
month_days: Optional[List[int]] = None,
**kwargs
):
"""
:keyword hours:
:paramtype hours: list[int]
:keyword minutes:
:paramtype minutes: list[int]
:keyword week_days:
:paramtype week_days: list[str or ~flow.models.WeekDays]
:keyword month_days:
:paramtype month_days: list[int]
"""
super(RecurrenceSchedule, self).__init__(**kwargs)
self.hours = hours
self.minutes = minutes
self.week_days = week_days
self.month_days = month_days
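# Editor's note: RecurrencePattern (above) and RecurrenceSchedule are near-
# duplicates. The schedule variant adds 'month_days' and serializes its day
# list under the REST key 'weekDays', while the pattern serializes 'weekdays'
# (lower-case 'd'); pick the model matching the payload you are targeting.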
class RegenerateServiceKeysRequest(msrest.serialization.Model):
"""RegenerateServiceKeysRequest.
:ivar key_type: Possible values include: "Primary", "Secondary".
:vartype key_type: str or ~flow.models.KeyType
:ivar key_value:
:vartype key_value: str
"""
_attribute_map = {
'key_type': {'key': 'keyType', 'type': 'str'},
'key_value': {'key': 'keyValue', 'type': 'str'},
}
def __init__(
self,
*,
key_type: Optional[Union[str, "KeyType"]] = None,
key_value: Optional[str] = None,
**kwargs
):
"""
:keyword key_type: Possible values include: "Primary", "Secondary".
:paramtype key_type: str or ~flow.models.KeyType
:keyword key_value:
:paramtype key_value: str
"""
super(RegenerateServiceKeysRequest, self).__init__(**kwargs)
self.key_type = key_type
self.key_value = key_value
class RegisterComponentMetaInfo(msrest.serialization.Model):
"""RegisterComponentMetaInfo.
:ivar aml_module_name:
:vartype aml_module_name: str
:ivar name_only_display_info:
:vartype name_only_display_info: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar module_version_id:
:vartype module_version_id: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:ivar module_entity_from_yaml:
:vartype module_entity_from_yaml: ~flow.models.ModuleEntity
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar data_types_from_yaml:
:vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
:ivar content_hash:
:vartype content_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
:ivar registration:
:vartype registration: bool
:ivar validate_only:
:vartype validate_only: bool
:ivar skip_workspace_related_check:
:vartype skip_workspace_related_check: bool
:ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
:vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:ivar system_managed_registration:
:vartype system_managed_registration: bool
:ivar allow_dup_name_between_input_and_ouput_port:
:vartype allow_dup_name_between_input_and_ouput_port: bool
:ivar module_source:
:vartype module_source: str
:ivar module_scope:
:vartype module_scope: str
:ivar module_additional_includes_count:
:vartype module_additional_includes_count: int
:ivar module_os_type:
:vartype module_os_type: str
:ivar module_codegen_by:
:vartype module_codegen_by: str
:ivar module_client_source:
:vartype module_client_source: str
:ivar module_is_builtin:
:vartype module_is_builtin: bool
:ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
:vartype module_register_event_extension_fields: dict[str, str]
"""
_attribute_map = {
'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterComponentMetaInfoIdentifierHashes'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterComponentMetaInfoExtraHashes'},
'registration': {'key': 'registration', 'type': 'bool'},
'validate_only': {'key': 'validateOnly', 'type': 'bool'},
'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
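        # NOTE: "Ouput" in the key below appears misspelled, but the REST key
        # must match the service wire contract exactly, so it is left as-is.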
'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
'module_source': {'key': 'moduleSource', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
}
def __init__(
self,
*,
aml_module_name: Optional[str] = None,
name_only_display_info: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
module_version_id: Optional[str] = None,
snapshot_id: Optional[str] = None,
component_registration_type: Optional[Union[str, "ComponentRegistrationTypeEnum"]] = None,
module_entity_from_yaml: Optional["ModuleEntity"] = None,
set_as_default_version: Optional[bool] = None,
data_types_from_yaml: Optional[List["DataTypeCreationInfo"]] = None,
data_type_mechanism: Optional[Union[str, "DataTypeMechanism"]] = None,
identifier_hash: Optional[str] = None,
identifier_hashes: Optional["RegisterComponentMetaInfoIdentifierHashes"] = None,
content_hash: Optional[str] = None,
extra_hash: Optional[str] = None,
extra_hashes: Optional["RegisterComponentMetaInfoExtraHashes"] = None,
registration: Optional[bool] = None,
validate_only: Optional[bool] = None,
skip_workspace_related_check: Optional[bool] = None,
intellectual_property_protected_workspace_component_registration_allowed_publisher: Optional[List[str]] = None,
system_managed_registration: Optional[bool] = None,
allow_dup_name_between_input_and_ouput_port: Optional[bool] = None,
module_source: Optional[str] = None,
module_scope: Optional[str] = None,
module_additional_includes_count: Optional[int] = None,
module_os_type: Optional[str] = None,
module_codegen_by: Optional[str] = None,
module_client_source: Optional[str] = None,
module_is_builtin: Optional[bool] = None,
module_register_event_extension_fields: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword aml_module_name:
:paramtype aml_module_name: str
:keyword name_only_display_info:
:paramtype name_only_display_info: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword module_version_id:
:paramtype module_version_id: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:keyword module_entity_from_yaml:
:paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword data_types_from_yaml:
:paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.RegisterComponentMetaInfoIdentifierHashes
:keyword content_hash:
:paramtype content_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.RegisterComponentMetaInfoExtraHashes
:keyword registration:
:paramtype registration: bool
:keyword validate_only:
:paramtype validate_only: bool
:keyword skip_workspace_related_check:
:paramtype skip_workspace_related_check: bool
:keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
:paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:keyword system_managed_registration:
:paramtype system_managed_registration: bool
:keyword allow_dup_name_between_input_and_ouput_port:
:paramtype allow_dup_name_between_input_and_ouput_port: bool
:keyword module_source:
:paramtype module_source: str
:keyword module_scope:
:paramtype module_scope: str
:keyword module_additional_includes_count:
:paramtype module_additional_includes_count: int
:keyword module_os_type:
:paramtype module_os_type: str
:keyword module_codegen_by:
:paramtype module_codegen_by: str
:keyword module_client_source:
:paramtype module_client_source: str
:keyword module_is_builtin:
:paramtype module_is_builtin: bool
:keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
:paramtype module_register_event_extension_fields: dict[str, str]
"""
super(RegisterComponentMetaInfo, self).__init__(**kwargs)
self.aml_module_name = aml_module_name
self.name_only_display_info = name_only_display_info
self.name = name
self.version = version
self.module_version_id = module_version_id
self.snapshot_id = snapshot_id
self.component_registration_type = component_registration_type
self.module_entity_from_yaml = module_entity_from_yaml
self.set_as_default_version = set_as_default_version
self.data_types_from_yaml = data_types_from_yaml
self.data_type_mechanism = data_type_mechanism
self.identifier_hash = identifier_hash
self.identifier_hashes = identifier_hashes
self.content_hash = content_hash
self.extra_hash = extra_hash
self.extra_hashes = extra_hashes
self.registration = registration
self.validate_only = validate_only
self.skip_workspace_related_check = skip_workspace_related_check
self.intellectual_property_protected_workspace_component_registration_allowed_publisher = intellectual_property_protected_workspace_component_registration_allowed_publisher
self.system_managed_registration = system_managed_registration
self.allow_dup_name_between_input_and_ouput_port = allow_dup_name_between_input_and_ouput_port
self.module_source = module_source
self.module_scope = module_scope
self.module_additional_includes_count = module_additional_includes_count
self.module_os_type = module_os_type
self.module_codegen_by = module_codegen_by
self.module_client_source = module_client_source
self.module_is_builtin = module_is_builtin
self.module_register_event_extension_fields = module_register_event_extension_fields
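# --- Illustrative usage (editor's sketch, not generated code) ---------------
# Sketch of a validation-only registration request; 'validate_only' is
# presumably a dry-run flag (the generated docs do not describe its
# semantics), and the name/version are assumptions.
#
#   meta = RegisterComponentMetaInfo(
#       name="my_component",
#       version="0.0.1",
#       component_registration_type="Normal",
#       validate_only=True,
#   )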
class RegisterComponentMetaInfoExtraHashes(msrest.serialization.Model):
"""RegisterComponentMetaInfoExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterComponentMetaInfoExtraHashes, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
class RegisterComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
"""RegisterComponentMetaInfoIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
class RegisteredDataSetReference(msrest.serialization.Model):
"""RegisteredDataSetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(RegisteredDataSetReference, self).__init__(**kwargs)
self.id = id
self.name = name
self.version = version
class RegisterRegistryComponentMetaInfo(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfo.
:ivar registry_name:
:vartype registry_name: str
:ivar intellectual_property_publisher_information:
:vartype intellectual_property_publisher_information:
~flow.models.IntellectualPropertyPublisherInformation
:ivar blob_reference_data: This is a dictionary.
:vartype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
:ivar aml_module_name:
:vartype aml_module_name: str
:ivar name_only_display_info:
:vartype name_only_display_info: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar module_version_id:
:vartype module_version_id: str
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:vartype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:ivar module_entity_from_yaml:
:vartype module_entity_from_yaml: ~flow.models.ModuleEntity
:ivar set_as_default_version:
:vartype set_as_default_version: bool
:ivar data_types_from_yaml:
:vartype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:ivar data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:vartype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
:ivar content_hash:
:vartype content_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
:ivar registration:
:vartype registration: bool
:ivar validate_only:
:vartype validate_only: bool
:ivar skip_workspace_related_check:
:vartype skip_workspace_related_check: bool
:ivar intellectual_property_protected_workspace_component_registration_allowed_publisher:
:vartype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:ivar system_managed_registration:
:vartype system_managed_registration: bool
:ivar allow_dup_name_between_input_and_ouput_port:
:vartype allow_dup_name_between_input_and_ouput_port: bool
:ivar module_source:
:vartype module_source: str
:ivar module_scope:
:vartype module_scope: str
:ivar module_additional_includes_count:
:vartype module_additional_includes_count: int
:ivar module_os_type:
:vartype module_os_type: str
:ivar module_codegen_by:
:vartype module_codegen_by: str
:ivar module_client_source:
:vartype module_client_source: str
:ivar module_is_builtin:
:vartype module_is_builtin: bool
:ivar module_register_event_extension_fields: Dictionary of :code:`<string>`.
:vartype module_register_event_extension_fields: dict[str, str]
"""
_attribute_map = {
'registry_name': {'key': 'registryName', 'type': 'str'},
'intellectual_property_publisher_information': {'key': 'intellectualPropertyPublisherInformation', 'type': 'IntellectualPropertyPublisherInformation'},
'blob_reference_data': {'key': 'blobReferenceData', 'type': '{RegistryBlobReferenceData}'},
'aml_module_name': {'key': 'amlModuleName', 'type': 'str'},
'name_only_display_info': {'key': 'nameOnlyDisplayInfo', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'module_version_id': {'key': 'moduleVersionId', 'type': 'str'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'component_registration_type': {'key': 'componentRegistrationType', 'type': 'str'},
'module_entity_from_yaml': {'key': 'moduleEntityFromYaml', 'type': 'ModuleEntity'},
'set_as_default_version': {'key': 'setAsDefaultVersion', 'type': 'bool'},
'data_types_from_yaml': {'key': 'dataTypesFromYaml', 'type': '[DataTypeCreationInfo]'},
'data_type_mechanism': {'key': 'dataTypeMechanism', 'type': 'str'},
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'RegisterRegistryComponentMetaInfoIdentifierHashes'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'extra_hashes': {'key': 'extraHashes', 'type': 'RegisterRegistryComponentMetaInfoExtraHashes'},
'registration': {'key': 'registration', 'type': 'bool'},
'validate_only': {'key': 'validateOnly', 'type': 'bool'},
'skip_workspace_related_check': {'key': 'skipWorkspaceRelatedCheck', 'type': 'bool'},
'intellectual_property_protected_workspace_component_registration_allowed_publisher': {'key': 'intellectualPropertyProtectedWorkspaceComponentRegistrationAllowedPublisher', 'type': '[str]'},
'system_managed_registration': {'key': 'systemManagedRegistration', 'type': 'bool'},
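        # NOTE: as above, "Ouput" in the key below mirrors the misspelling in
        # the service wire contract and must stay as-is to match it.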
'allow_dup_name_between_input_and_ouput_port': {'key': 'allowDupNameBetweenInputAndOuputPort', 'type': 'bool'},
'module_source': {'key': 'moduleSource', 'type': 'str'},
'module_scope': {'key': 'moduleScope', 'type': 'str'},
'module_additional_includes_count': {'key': 'moduleAdditionalIncludesCount', 'type': 'int'},
'module_os_type': {'key': 'moduleOSType', 'type': 'str'},
'module_codegen_by': {'key': 'moduleCodegenBy', 'type': 'str'},
'module_client_source': {'key': 'moduleClientSource', 'type': 'str'},
'module_is_builtin': {'key': 'moduleIsBuiltin', 'type': 'bool'},
'module_register_event_extension_fields': {'key': 'moduleRegisterEventExtensionFields', 'type': '{str}'},
}
def __init__(
self,
*,
registry_name: Optional[str] = None,
intellectual_property_publisher_information: Optional["IntellectualPropertyPublisherInformation"] = None,
blob_reference_data: Optional[Dict[str, "RegistryBlobReferenceData"]] = None,
aml_module_name: Optional[str] = None,
name_only_display_info: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
module_version_id: Optional[str] = None,
snapshot_id: Optional[str] = None,
component_registration_type: Optional[Union[str, "ComponentRegistrationTypeEnum"]] = None,
module_entity_from_yaml: Optional["ModuleEntity"] = None,
set_as_default_version: Optional[bool] = None,
data_types_from_yaml: Optional[List["DataTypeCreationInfo"]] = None,
data_type_mechanism: Optional[Union[str, "DataTypeMechanism"]] = None,
identifier_hash: Optional[str] = None,
identifier_hashes: Optional["RegisterRegistryComponentMetaInfoIdentifierHashes"] = None,
content_hash: Optional[str] = None,
extra_hash: Optional[str] = None,
extra_hashes: Optional["RegisterRegistryComponentMetaInfoExtraHashes"] = None,
registration: Optional[bool] = None,
validate_only: Optional[bool] = None,
skip_workspace_related_check: Optional[bool] = None,
intellectual_property_protected_workspace_component_registration_allowed_publisher: Optional[List[str]] = None,
system_managed_registration: Optional[bool] = None,
allow_dup_name_between_input_and_ouput_port: Optional[bool] = None,
module_source: Optional[str] = None,
module_scope: Optional[str] = None,
module_additional_includes_count: Optional[int] = None,
module_os_type: Optional[str] = None,
module_codegen_by: Optional[str] = None,
module_client_source: Optional[str] = None,
module_is_builtin: Optional[bool] = None,
module_register_event_extension_fields: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword registry_name:
:paramtype registry_name: str
:keyword intellectual_property_publisher_information:
:paramtype intellectual_property_publisher_information:
~flow.models.IntellectualPropertyPublisherInformation
:keyword blob_reference_data: This is a dictionary.
:paramtype blob_reference_data: dict[str, ~flow.models.RegistryBlobReferenceData]
:keyword aml_module_name:
:paramtype aml_module_name: str
:keyword name_only_display_info:
:paramtype name_only_display_info: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword module_version_id:
:paramtype module_version_id: str
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword component_registration_type: Possible values include: "Normal", "AnonymousAmlModule",
"AnonymousAmlModuleVersion", "ModuleEntityOnly".
:paramtype component_registration_type: str or ~flow.models.ComponentRegistrationTypeEnum
:keyword module_entity_from_yaml:
:paramtype module_entity_from_yaml: ~flow.models.ModuleEntity
:keyword set_as_default_version:
:paramtype set_as_default_version: bool
:keyword data_types_from_yaml:
:paramtype data_types_from_yaml: list[~flow.models.DataTypeCreationInfo]
:keyword data_type_mechanism: Possible values include: "ErrorWhenNotExisting",
"RegisterWhenNotExisting", "RegisterBuildinDataTypeOnly".
:paramtype data_type_mechanism: str or ~flow.models.DataTypeMechanism
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.RegisterRegistryComponentMetaInfoIdentifierHashes
:keyword content_hash:
:paramtype content_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.RegisterRegistryComponentMetaInfoExtraHashes
:keyword registration:
:paramtype registration: bool
:keyword validate_only:
:paramtype validate_only: bool
:keyword skip_workspace_related_check:
:paramtype skip_workspace_related_check: bool
:keyword intellectual_property_protected_workspace_component_registration_allowed_publisher:
:paramtype intellectual_property_protected_workspace_component_registration_allowed_publisher:
list[str]
:keyword system_managed_registration:
:paramtype system_managed_registration: bool
:keyword allow_dup_name_between_input_and_ouput_port:
:paramtype allow_dup_name_between_input_and_ouput_port: bool
:keyword module_source:
:paramtype module_source: str
:keyword module_scope:
:paramtype module_scope: str
:keyword module_additional_includes_count:
:paramtype module_additional_includes_count: int
:keyword module_os_type:
:paramtype module_os_type: str
:keyword module_codegen_by:
:paramtype module_codegen_by: str
:keyword module_client_source:
:paramtype module_client_source: str
:keyword module_is_builtin:
:paramtype module_is_builtin: bool
:keyword module_register_event_extension_fields: Dictionary of :code:`<string>`.
:paramtype module_register_event_extension_fields: dict[str, str]
"""
super(RegisterRegistryComponentMetaInfo, self).__init__(**kwargs)
self.registry_name = registry_name
self.intellectual_property_publisher_information = intellectual_property_publisher_information
self.blob_reference_data = blob_reference_data
self.aml_module_name = aml_module_name
self.name_only_display_info = name_only_display_info
self.name = name
self.version = version
self.module_version_id = module_version_id
self.snapshot_id = snapshot_id
self.component_registration_type = component_registration_type
self.module_entity_from_yaml = module_entity_from_yaml
self.set_as_default_version = set_as_default_version
self.data_types_from_yaml = data_types_from_yaml
self.data_type_mechanism = data_type_mechanism
self.identifier_hash = identifier_hash
self.identifier_hashes = identifier_hashes
self.content_hash = content_hash
self.extra_hash = extra_hash
self.extra_hashes = extra_hashes
self.registration = registration
self.validate_only = validate_only
self.skip_workspace_related_check = skip_workspace_related_check
self.intellectual_property_protected_workspace_component_registration_allowed_publisher = intellectual_property_protected_workspace_component_registration_allowed_publisher
self.system_managed_registration = system_managed_registration
self.allow_dup_name_between_input_and_ouput_port = allow_dup_name_between_input_and_ouput_port
self.module_source = module_source
self.module_scope = module_scope
self.module_additional_includes_count = module_additional_includes_count
self.module_os_type = module_os_type
self.module_codegen_by = module_codegen_by
self.module_client_source = module_client_source
self.module_is_builtin = module_is_builtin
self.module_register_event_extension_fields = module_register_event_extension_fields
class RegisterRegistryComponentMetaInfoExtraHashes(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfoExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterRegistryComponentMetaInfoExtraHashes, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
class RegisterRegistryComponentMetaInfoIdentifierHashes(msrest.serialization.Model):
"""RegisterRegistryComponentMetaInfoIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(RegisterRegistryComponentMetaInfoIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
class RegistrationOptions(msrest.serialization.Model):
"""RegistrationOptions.
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar dataset_registration_options:
:vartype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'dataset_registration_options': {'key': 'datasetRegistrationOptions', 'type': 'DatasetRegistrationOptions'},
}
def __init__(
self,
*,
name: Optional[str] = None,
version: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
dataset_registration_options: Optional["DatasetRegistrationOptions"] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword dataset_registration_options:
:paramtype dataset_registration_options: ~flow.models.DatasetRegistrationOptions
"""
super(RegistrationOptions, self).__init__(**kwargs)
self.name = name
self.version = version
self.description = description
self.tags = tags
self.properties = properties
self.dataset_registration_options = dataset_registration_options
class RegistryBlobReferenceData(msrest.serialization.Model):
"""RegistryBlobReferenceData.
:ivar data_reference_id:
:vartype data_reference_id: str
:ivar data:
:vartype data: str
"""
_attribute_map = {
'data_reference_id': {'key': 'dataReferenceId', 'type': 'str'},
'data': {'key': 'data', 'type': 'str'},
}
def __init__(
self,
*,
data_reference_id: Optional[str] = None,
data: Optional[str] = None,
**kwargs
):
"""
:keyword data_reference_id:
:paramtype data_reference_id: str
:keyword data:
:paramtype data: str
"""
super(RegistryBlobReferenceData, self).__init__(**kwargs)
self.data_reference_id = data_reference_id
self.data = data
class RegistryIdentity(msrest.serialization.Model):
"""RegistryIdentity.
:ivar resource_id:
:vartype resource_id: str
:ivar client_id:
:vartype client_id: str
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
*,
resource_id: Optional[str] = None,
client_id: Optional[str] = None,
**kwargs
):
"""
:keyword resource_id:
:paramtype resource_id: str
:keyword client_id:
:paramtype client_id: str
"""
super(RegistryIdentity, self).__init__(**kwargs)
self.resource_id = resource_id
self.client_id = client_id
class Relationship(msrest.serialization.Model):
"""Relationship.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar relation_type:
:vartype relation_type: str
:ivar target_entity_id:
:vartype target_entity_id: str
:ivar asset_id:
:vartype asset_id: str
:ivar entity_type:
:vartype entity_type: str
:ivar direction:
:vartype direction: str
:ivar entity_container_id:
:vartype entity_container_id: str
"""
_validation = {
'entity_type': {'readonly': True},
'entity_container_id': {'readonly': True},
}
_attribute_map = {
'relation_type': {'key': 'relationType', 'type': 'str'},
'target_entity_id': {'key': 'targetEntityId', 'type': 'str'},
'asset_id': {'key': 'assetId', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'direction': {'key': 'direction', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
}
def __init__(
self,
*,
relation_type: Optional[str] = None,
target_entity_id: Optional[str] = None,
asset_id: Optional[str] = None,
direction: Optional[str] = None,
**kwargs
):
"""
:keyword relation_type:
:paramtype relation_type: str
:keyword target_entity_id:
:paramtype target_entity_id: str
:keyword asset_id:
:paramtype asset_id: str
:keyword direction:
:paramtype direction: str
"""
super(Relationship, self).__init__(**kwargs)
self.relation_type = relation_type
self.target_entity_id = target_entity_id
self.asset_id = asset_id
self.entity_type = None
self.direction = direction
self.entity_container_id = None
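# Illustrative sketch (not generated code): entity_type and entity_container_id
# are marked readonly in _validation above, so they are populated only from
# server responses and are dropped when serializing a request body. Values are
# hypothetical.
def _example_relationship():
    rel = Relationship(relation_type="Produces", target_entity_id="asset-123")
    return rel.serialize()  # keys follow _attribute_map: "relationType", ...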
class RemoteDockerComputeInfo(msrest.serialization.Model):
"""RemoteDockerComputeInfo.
:ivar address:
:vartype address: str
:ivar username:
:vartype username: str
:ivar password:
:vartype password: str
:ivar private_key:
:vartype private_key: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'private_key': {'key': 'privateKey', 'type': 'str'},
}
def __init__(
self,
*,
address: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
private_key: Optional[str] = None,
**kwargs
):
"""
:keyword address:
:paramtype address: str
:keyword username:
:paramtype username: str
:keyword password:
:paramtype password: str
:keyword private_key:
:paramtype private_key: str
"""
super(RemoteDockerComputeInfo, self).__init__(**kwargs)
self.address = address
self.username = username
self.password = password
self.private_key = private_key
class ResourceConfig(msrest.serialization.Model):
"""ResourceConfig.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
*,
gpu_count: Optional[int] = None,
cpu_count: Optional[int] = None,
memory_request_in_gb: Optional[int] = None,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(ResourceConfig, self).__init__(**kwargs)
self.gpu_count = gpu_count
self.cpu_count = cpu_count
self.memory_request_in_gb = memory_request_in_gb
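# Illustrative sketch (not generated code): ResourceConfig round-trips through
# the REST casing declared in _attribute_map ("gpuCount", "memoryRequestInGB").
def _example_resource_config():
    cfg = ResourceConfig(gpu_count=1, cpu_count=4, memory_request_in_gb=16)
    wire = cfg.serialize()                       # {"gpuCount": 1, ...}
    restored = ResourceConfig.deserialize(wire)  # back to a model instance
    return restored.memory_request_in_gb         # 16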
class ResourceConfiguration(msrest.serialization.Model):
"""ResourceConfiguration.
:ivar gpu_count:
:vartype gpu_count: int
:ivar cpu_count:
:vartype cpu_count: int
:ivar memory_request_in_gb:
:vartype memory_request_in_gb: int
"""
_attribute_map = {
'gpu_count': {'key': 'gpuCount', 'type': 'int'},
'cpu_count': {'key': 'cpuCount', 'type': 'int'},
'memory_request_in_gb': {'key': 'memoryRequestInGB', 'type': 'int'},
}
def __init__(
self,
*,
gpu_count: Optional[int] = None,
cpu_count: Optional[int] = None,
memory_request_in_gb: Optional[int] = None,
**kwargs
):
"""
:keyword gpu_count:
:paramtype gpu_count: int
:keyword cpu_count:
:paramtype cpu_count: int
:keyword memory_request_in_gb:
:paramtype memory_request_in_gb: int
"""
super(ResourceConfiguration, self).__init__(**kwargs)
self.gpu_count = gpu_count
self.cpu_count = cpu_count
self.memory_request_in_gb = memory_request_in_gb
class ResourcesSetting(msrest.serialization.Model):
"""ResourcesSetting.
:ivar instance_size:
:vartype instance_size: str
:ivar spark_version:
:vartype spark_version: str
"""
_attribute_map = {
'instance_size': {'key': 'instanceSize', 'type': 'str'},
'spark_version': {'key': 'sparkVersion', 'type': 'str'},
}
def __init__(
self,
*,
instance_size: Optional[str] = None,
spark_version: Optional[str] = None,
**kwargs
):
"""
:keyword instance_size:
:paramtype instance_size: str
:keyword spark_version:
:paramtype spark_version: str
"""
super(ResourcesSetting, self).__init__(**kwargs)
self.instance_size = instance_size
self.spark_version = spark_version
class RetrieveToolFuncResultRequest(msrest.serialization.Model):
"""RetrieveToolFuncResultRequest.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs: This is a dictionary.
:vartype func_kwargs: dict[str, any]
:ivar func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
"dynamic_list".
:vartype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '{object}'},
'func_call_scenario': {'key': 'func_call_scenario', 'type': 'str'},
}
def __init__(
self,
*,
func_path: Optional[str] = None,
func_kwargs: Optional[Dict[str, Any]] = None,
func_call_scenario: Optional[Union[str, "ToolFuncCallScenario"]] = None,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs: This is a dictionary.
:paramtype func_kwargs: dict[str, any]
:keyword func_call_scenario: Possible values include: "generated_by", "reverse_generated_by",
"dynamic_list".
:paramtype func_call_scenario: str or ~flow.models.ToolFuncCallScenario
"""
super(RetrieveToolFuncResultRequest, self).__init__(**kwargs)
self.func_path = func_path
self.func_kwargs = func_kwargs
self.func_call_scenario = func_call_scenario
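# Illustrative sketch (not generated code): func_call_scenario accepts either a
# plain string or a ToolFuncCallScenario enum member; both serialize to the
# string value ("dynamic_list" below). func_path and func_kwargs are
# hypothetical.
def _example_retrieve_tool_func_result_request():
    req = RetrieveToolFuncResultRequest(
        func_path="my_pkg.tools.list_options",
        func_kwargs={"prefix": "az"},
        func_call_scenario="dynamic_list",
    )
    return req.serialize()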
class RetryConfiguration(msrest.serialization.Model):
"""RetryConfiguration.
:ivar max_retry_count:
:vartype max_retry_count: int
"""
_attribute_map = {
'max_retry_count': {'key': 'maxRetryCount', 'type': 'int'},
}
def __init__(
self,
*,
max_retry_count: Optional[int] = None,
**kwargs
):
"""
:keyword max_retry_count:
:paramtype max_retry_count: int
"""
super(RetryConfiguration, self).__init__(**kwargs)
self.max_retry_count = max_retry_count
class RGitHubPackage(msrest.serialization.Model):
"""RGitHubPackage.
:ivar repository:
:vartype repository: str
:ivar auth_token:
:vartype auth_token: str
"""
_attribute_map = {
'repository': {'key': 'repository', 'type': 'str'},
'auth_token': {'key': 'authToken', 'type': 'str'},
}
def __init__(
self,
*,
repository: Optional[str] = None,
auth_token: Optional[str] = None,
**kwargs
):
"""
:keyword repository:
:paramtype repository: str
:keyword auth_token:
:paramtype auth_token: str
"""
super(RGitHubPackage, self).__init__(**kwargs)
self.repository = repository
self.auth_token = auth_token
class RootError(msrest.serialization.Model):
"""The root error.
:ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
ValidationError, AzureStorageError, TransientError, RequestThrottled.
:vartype code: str
:ivar severity: The severity of the error.
:vartype severity: int
:ivar message: A human-readable representation of the error.
:vartype message: str
:ivar message_format: An unformatted version of the message with no variable substitution.
:vartype message_format: str
:ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
:vartype message_parameters: dict[str, str]
:ivar reference_code: This code can optionally be set by the system generating the error.
It should be used to classify the problem and identify the module and code area where the
failure occurred.
:vartype reference_code: str
:ivar details_uri: A URI which points to more details about the context of the error.
:vartype details_uri: str
:ivar target: The target of the error (e.g., the name of the property in error).
:vartype target: str
:ivar details: The related errors that occurred during the request.
:vartype details: list[~flow.models.RootError]
:ivar inner_error: A nested structure of errors.
:vartype inner_error: ~flow.models.InnerErrorResponse
:ivar additional_info: The error additional info.
:vartype additional_info: list[~flow.models.ErrorAdditionalInfo]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'message_format': {'key': 'messageFormat', 'type': 'str'},
'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
'reference_code': {'key': 'referenceCode', 'type': 'str'},
'details_uri': {'key': 'detailsUri', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[RootError]'},
'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
}
def __init__(
self,
*,
code: Optional[str] = None,
severity: Optional[int] = None,
message: Optional[str] = None,
message_format: Optional[str] = None,
message_parameters: Optional[Dict[str, str]] = None,
reference_code: Optional[str] = None,
details_uri: Optional[str] = None,
target: Optional[str] = None,
details: Optional[List["RootError"]] = None,
inner_error: Optional["InnerErrorResponse"] = None,
additional_info: Optional[List["ErrorAdditionalInfo"]] = None,
**kwargs
):
"""
:keyword code: The service-defined error code. Supported error codes: ServiceError, UserError,
ValidationError, AzureStorageError, TransientError, RequestThrottled.
:paramtype code: str
:keyword severity: The severity of the error.
:paramtype severity: int
:keyword message: A human-readable representation of the error.
:paramtype message: str
:keyword message_format: An unformatted version of the message with no variable substitution.
:paramtype message_format: str
:keyword message_parameters: Value substitutions corresponding to the contents of
MessageFormat.
:paramtype message_parameters: dict[str, str]
:keyword reference_code: This code can optionally be set by the system generating the error.
It should be used to classify the problem and identify the module and code area where the
failure occurred.
:paramtype reference_code: str
:keyword details_uri: A URI which points to more details about the context of the error.
:paramtype details_uri: str
:keyword target: The target of the error (e.g., the name of the property in error).
:paramtype target: str
:keyword details: The related errors that occurred during the request.
:paramtype details: list[~flow.models.RootError]
:keyword inner_error: A nested structure of errors.
:paramtype inner_error: ~flow.models.InnerErrorResponse
:keyword additional_info: The error additional info.
:paramtype additional_info: list[~flow.models.ErrorAdditionalInfo]
"""
super(RootError, self).__init__(**kwargs)
self.code = code
self.severity = severity
self.message = message
self.message_format = message_format
self.message_parameters = message_parameters
self.reference_code = reference_code
self.details_uri = details_uri
self.target = target
self.details = details
self.inner_error = inner_error
self.additional_info = additional_info
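# Illustrative sketch (not generated code): RootError nests recursively via
# `details` (declared '[RootError]' above) and `inner_error`, so a service
# fault can be modeled as a tree. Codes/messages below are hypothetical.
def _example_root_error():
    leaf = RootError(code="ValidationError", message="'name' is required.")
    root = RootError(
        code="UserError",
        message="The request is invalid.",
        details=[leaf],  # related errors use the same model recursively
    )
    return root.serialize()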
class RSection(msrest.serialization.Model):
"""RSection.
:ivar r_version:
:vartype r_version: str
:ivar user_managed:
:vartype user_managed: bool
:ivar rscript_path:
:vartype rscript_path: str
:ivar snapshot_date:
:vartype snapshot_date: str
:ivar cran_packages:
:vartype cran_packages: list[~flow.models.RCranPackage]
:ivar git_hub_packages:
:vartype git_hub_packages: list[~flow.models.RGitHubPackage]
:ivar custom_url_packages:
:vartype custom_url_packages: list[str]
:ivar bio_conductor_packages:
:vartype bio_conductor_packages: list[str]
"""
_attribute_map = {
'r_version': {'key': 'rVersion', 'type': 'str'},
'user_managed': {'key': 'userManaged', 'type': 'bool'},
'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
}
def __init__(
self,
*,
r_version: Optional[str] = None,
user_managed: Optional[bool] = None,
rscript_path: Optional[str] = None,
snapshot_date: Optional[str] = None,
cran_packages: Optional[List["RCranPackage"]] = None,
git_hub_packages: Optional[List["RGitHubPackage"]] = None,
custom_url_packages: Optional[List[str]] = None,
bio_conductor_packages: Optional[List[str]] = None,
**kwargs
):
"""
:keyword r_version:
:paramtype r_version: str
:keyword user_managed:
:paramtype user_managed: bool
:keyword rscript_path:
:paramtype rscript_path: str
:keyword snapshot_date:
:paramtype snapshot_date: str
:keyword cran_packages:
:paramtype cran_packages: list[~flow.models.RCranPackage]
:keyword git_hub_packages:
:paramtype git_hub_packages: list[~flow.models.RGitHubPackage]
:keyword custom_url_packages:
:paramtype custom_url_packages: list[str]
:keyword bio_conductor_packages:
:paramtype bio_conductor_packages: list[str]
"""
super(RSection, self).__init__(**kwargs)
self.r_version = r_version
self.user_managed = user_managed
self.rscript_path = rscript_path
self.snapshot_date = snapshot_date
self.cran_packages = cran_packages
self.git_hub_packages = git_hub_packages
self.custom_url_packages = custom_url_packages
self.bio_conductor_packages = bio_conductor_packages
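# Illustrative sketch (not generated code): composing RSection with the
# RGitHubPackage model defined above; list-typed members serialize
# element-wise. Repository and URL values are hypothetical.
def _example_r_section():
    section = RSection(
        r_version="4.2.0",
        git_hub_packages=[RGitHubPackage(repository="user/repo")],
        custom_url_packages=["https://example.com/pkg.tar.gz"],
    )
    return section.serialize()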
class RunAnnotations(msrest.serialization.Model):
"""RunAnnotations.
:ivar display_name:
:vartype display_name: str
:ivar status:
:vartype status: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar estimated_cost:
:vartype estimated_cost: float
:ivar primary_metric_summary:
:vartype primary_metric_summary: ~flow.models.RunIndexMetricSummary
:ivar metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
:vartype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar settings: Dictionary of :code:`<string>`.
:vartype settings: dict[str, str]
:ivar modified_time:
:vartype modified_time: ~datetime.datetime
:ivar retain_for_lifetime_of_workspace:
:vartype retain_for_lifetime_of_workspace: bool
:ivar error:
:vartype error: ~flow.models.IndexedErrorResponse
:ivar resource_metric_summary:
:vartype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
:ivar job_cost:
:vartype job_cost: ~flow.models.JobCost
:ivar compute_duration:
:vartype compute_duration: str
:ivar compute_duration_milliseconds:
:vartype compute_duration_milliseconds: float
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar archived:
:vartype archived: bool
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'estimated_cost': {'key': 'estimatedCost', 'type': 'float'},
'primary_metric_summary': {'key': 'primaryMetricSummary', 'type': 'RunIndexMetricSummary'},
'metrics': {'key': 'metrics', 'type': '{RunIndexMetricSummarySystemObject}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'settings': {'key': 'settings', 'type': '{str}'},
'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
'error': {'key': 'error', 'type': 'IndexedErrorResponse'},
'resource_metric_summary': {'key': 'resourceMetricSummary', 'type': 'RunIndexResourceMetricSummary'},
'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'compute_duration_milliseconds': {'key': 'computeDurationMilliseconds', 'type': 'float'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'archived': {'key': 'archived', 'type': 'bool'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
display_name: Optional[str] = None,
status: Optional[str] = None,
primary_metric_name: Optional[str] = None,
estimated_cost: Optional[float] = None,
primary_metric_summary: Optional["RunIndexMetricSummary"] = None,
metrics: Optional[Dict[str, "RunIndexMetricSummarySystemObject"]] = None,
parameters: Optional[Dict[str, Any]] = None,
settings: Optional[Dict[str, str]] = None,
modified_time: Optional[datetime.datetime] = None,
retain_for_lifetime_of_workspace: Optional[bool] = None,
error: Optional["IndexedErrorResponse"] = None,
resource_metric_summary: Optional["RunIndexResourceMetricSummary"] = None,
job_cost: Optional["JobCost"] = None,
compute_duration: Optional[str] = None,
compute_duration_milliseconds: Optional[float] = None,
effective_start_time_utc: Optional[datetime.datetime] = None,
name: Optional[str] = None,
description: Optional[str] = None,
archived: Optional[bool] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword status:
:paramtype status: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword estimated_cost:
:paramtype estimated_cost: float
:keyword primary_metric_summary:
:paramtype primary_metric_summary: ~flow.models.RunIndexMetricSummary
:keyword metrics: Dictionary of :code:`<RunIndexMetricSummarySystemObject>`.
:paramtype metrics: dict[str, ~flow.models.RunIndexMetricSummarySystemObject]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword settings: Dictionary of :code:`<string>`.
:paramtype settings: dict[str, str]
:keyword modified_time:
:paramtype modified_time: ~datetime.datetime
:keyword retain_for_lifetime_of_workspace:
:paramtype retain_for_lifetime_of_workspace: bool
:keyword error:
:paramtype error: ~flow.models.IndexedErrorResponse
:keyword resource_metric_summary:
:paramtype resource_metric_summary: ~flow.models.RunIndexResourceMetricSummary
:keyword job_cost:
:paramtype job_cost: ~flow.models.JobCost
:keyword compute_duration:
:paramtype compute_duration: str
:keyword compute_duration_milliseconds:
:paramtype compute_duration_milliseconds: float
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword archived:
:paramtype archived: bool
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(RunAnnotations, self).__init__(**kwargs)
self.display_name = display_name
self.status = status
self.primary_metric_name = primary_metric_name
self.estimated_cost = estimated_cost
self.primary_metric_summary = primary_metric_summary
self.metrics = metrics
self.parameters = parameters
self.settings = settings
self.modified_time = modified_time
self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
self.error = error
self.resource_metric_summary = resource_metric_summary
self.job_cost = job_cost
self.compute_duration = compute_duration
self.compute_duration_milliseconds = compute_duration_milliseconds
self.effective_start_time_utc = effective_start_time_utc
self.name = name
self.description = description
self.archived = archived
self.tags = tags
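# Illustrative sketch (not generated code): datetime-typed members such as
# modified_time are declared 'iso-8601' in _attribute_map and serialize to
# ISO 8601 strings. Values below are hypothetical.
def _example_run_annotations():
    import datetime
    ann = RunAnnotations(
        display_name="baseline",
        status="Completed",
        modified_time=datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
        tags={"owner": "team-a"},
    )
    return ann.serialize()  # modified_time becomes an ISO 8601 string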
class RunConfiguration(msrest.serialization.Model):
"""RunConfiguration.
:ivar script:
:vartype script: str
:ivar script_type: Possible values include: "Python", "Notebook".
:vartype script_type: str or ~flow.models.ScriptType
:ivar command:
:vartype command: str
:ivar use_absolute_path:
:vartype use_absolute_path: bool
:ivar arguments:
:vartype arguments: list[str]
:ivar framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow", "PyTorch",
"PySparkInteractive", "R".
:vartype framework: str or ~flow.models.Framework
:ivar communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi", "Nccl",
"ParallelTask".
:vartype communicator: str or ~flow.models.Communicator
:ivar target:
:vartype target: str
:ivar auto_cluster_compute_specification:
:vartype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
:ivar data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
:vartype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
:ivar data: Dictionary of :code:`<Data>`.
:vartype data: dict[str, ~flow.models.Data]
:ivar input_assets: Dictionary of :code:`<InputAsset>`.
:vartype input_assets: dict[str, ~flow.models.InputAsset]
:ivar output_data: Dictionary of :code:`<OutputData>`.
:vartype output_data: dict[str, ~flow.models.OutputData]
:ivar datacaches:
:vartype datacaches: list[~flow.models.DatacacheConfiguration]
:ivar job_name:
:vartype job_name: str
:ivar max_run_duration_seconds:
:vartype max_run_duration_seconds: long
:ivar node_count:
:vartype node_count: int
:ivar max_node_count:
:vartype max_node_count: int
:ivar instance_types:
:vartype instance_types: list[str]
:ivar priority:
:vartype priority: int
:ivar credential_passthrough:
:vartype credential_passthrough: bool
:ivar identity:
:vartype identity: ~flow.models.IdentityConfiguration
:ivar environment:
:vartype environment: ~flow.models.EnvironmentDefinition
:ivar history:
:vartype history: ~flow.models.HistoryConfiguration
:ivar spark:
:vartype spark: ~flow.models.SparkConfiguration
:ivar parallel_task:
:vartype parallel_task: ~flow.models.ParallelTaskConfiguration
:ivar tensorflow:
:vartype tensorflow: ~flow.models.TensorflowConfiguration
:ivar mpi:
:vartype mpi: ~flow.models.MpiConfiguration
:ivar py_torch:
:vartype py_torch: ~flow.models.PyTorchConfiguration
:ivar ray:
:vartype ray: ~flow.models.RayConfiguration
:ivar hdi:
:vartype hdi: ~flow.models.HdiConfiguration
:ivar docker:
:vartype docker: ~flow.models.DockerConfiguration
:ivar command_return_code_config:
:vartype command_return_code_config: ~flow.models.CommandReturnCodeConfig
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:vartype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:ivar parameters:
:vartype parameters: list[~flow.models.ParameterDefinition]
:ivar autologger_settings:
:vartype autologger_settings: ~flow.models.AutologgerSettings
:ivar data_bricks:
:vartype data_bricks: ~flow.models.DatabricksConfiguration
:ivar training_diagnostic_config:
:vartype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
:ivar secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
"""
_attribute_map = {
'script': {'key': 'script', 'type': 'str'},
'script_type': {'key': 'scriptType', 'type': 'str'},
'command': {'key': 'command', 'type': 'str'},
'use_absolute_path': {'key': 'useAbsolutePath', 'type': 'bool'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'framework': {'key': 'framework', 'type': 'str'},
'communicator': {'key': 'communicator', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'auto_cluster_compute_specification': {'key': 'autoClusterComputeSpecification', 'type': 'AutoClusterComputeSpecification'},
'data_references': {'key': 'dataReferences', 'type': '{DataReferenceConfiguration}'},
'data': {'key': 'data', 'type': '{Data}'},
'input_assets': {'key': 'inputAssets', 'type': '{InputAsset}'},
'output_data': {'key': 'outputData', 'type': '{OutputData}'},
'datacaches': {'key': 'datacaches', 'type': '[DatacacheConfiguration]'},
'job_name': {'key': 'jobName', 'type': 'str'},
'max_run_duration_seconds': {'key': 'maxRunDurationSeconds', 'type': 'long'},
'node_count': {'key': 'nodeCount', 'type': 'int'},
'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'priority': {'key': 'priority', 'type': 'int'},
'credential_passthrough': {'key': 'credentialPassthrough', 'type': 'bool'},
'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
'environment': {'key': 'environment', 'type': 'EnvironmentDefinition'},
'history': {'key': 'history', 'type': 'HistoryConfiguration'},
'spark': {'key': 'spark', 'type': 'SparkConfiguration'},
'parallel_task': {'key': 'parallelTask', 'type': 'ParallelTaskConfiguration'},
'tensorflow': {'key': 'tensorflow', 'type': 'TensorflowConfiguration'},
'mpi': {'key': 'mpi', 'type': 'MpiConfiguration'},
'py_torch': {'key': 'pyTorch', 'type': 'PyTorchConfiguration'},
'ray': {'key': 'ray', 'type': 'RayConfiguration'},
'hdi': {'key': 'hdi', 'type': 'HdiConfiguration'},
'docker': {'key': 'docker', 'type': 'DockerConfiguration'},
'command_return_code_config': {'key': 'commandReturnCodeConfig', 'type': 'CommandReturnCodeConfig'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'application_endpoints': {'key': 'applicationEndpoints', 'type': '{ApplicationEndpointConfiguration}'},
'parameters': {'key': 'parameters', 'type': '[ParameterDefinition]'},
'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'},
'data_bricks': {'key': 'dataBricks', 'type': 'DatabricksConfiguration'},
'training_diagnostic_config': {'key': 'trainingDiagnosticConfig', 'type': 'TrainingDiagnosticConfiguration'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'},
}
def __init__(
self,
*,
script: Optional[str] = None,
script_type: Optional[Union[str, "ScriptType"]] = None,
command: Optional[str] = None,
use_absolute_path: Optional[bool] = None,
arguments: Optional[List[str]] = None,
framework: Optional[Union[str, "Framework"]] = None,
communicator: Optional[Union[str, "Communicator"]] = None,
target: Optional[str] = None,
auto_cluster_compute_specification: Optional["AutoClusterComputeSpecification"] = None,
data_references: Optional[Dict[str, "DataReferenceConfiguration"]] = None,
data: Optional[Dict[str, "Data"]] = None,
input_assets: Optional[Dict[str, "InputAsset"]] = None,
output_data: Optional[Dict[str, "OutputData"]] = None,
datacaches: Optional[List["DatacacheConfiguration"]] = None,
job_name: Optional[str] = None,
max_run_duration_seconds: Optional[int] = None,
node_count: Optional[int] = None,
max_node_count: Optional[int] = None,
instance_types: Optional[List[str]] = None,
priority: Optional[int] = None,
credential_passthrough: Optional[bool] = None,
identity: Optional["IdentityConfiguration"] = None,
environment: Optional["EnvironmentDefinition"] = None,
history: Optional["HistoryConfiguration"] = None,
spark: Optional["SparkConfiguration"] = None,
parallel_task: Optional["ParallelTaskConfiguration"] = None,
tensorflow: Optional["TensorflowConfiguration"] = None,
mpi: Optional["MpiConfiguration"] = None,
py_torch: Optional["PyTorchConfiguration"] = None,
ray: Optional["RayConfiguration"] = None,
hdi: Optional["HdiConfiguration"] = None,
docker: Optional["DockerConfiguration"] = None,
command_return_code_config: Optional["CommandReturnCodeConfig"] = None,
environment_variables: Optional[Dict[str, str]] = None,
application_endpoints: Optional[Dict[str, "ApplicationEndpointConfiguration"]] = None,
parameters: Optional[List["ParameterDefinition"]] = None,
autologger_settings: Optional["AutologgerSettings"] = None,
data_bricks: Optional["DatabricksConfiguration"] = None,
training_diagnostic_config: Optional["TrainingDiagnosticConfiguration"] = None,
secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None,
**kwargs
):
"""
:keyword script:
:paramtype script: str
:keyword script_type: Possible values include: "Python", "Notebook".
:paramtype script_type: str or ~flow.models.ScriptType
:keyword command:
:paramtype command: str
:keyword use_absolute_path:
:paramtype use_absolute_path: bool
:keyword arguments:
:paramtype arguments: list[str]
:keyword framework: Possible values include: "Python", "PySpark", "Cntk", "TensorFlow",
"PyTorch", "PySparkInteractive", "R".
:paramtype framework: str or ~flow.models.Framework
:keyword communicator: Possible values include: "None", "ParameterServer", "Gloo", "Mpi",
"Nccl", "ParallelTask".
:paramtype communicator: str or ~flow.models.Communicator
:keyword target:
:paramtype target: str
:keyword auto_cluster_compute_specification:
:paramtype auto_cluster_compute_specification: ~flow.models.AutoClusterComputeSpecification
:keyword data_references: Dictionary of :code:`<DataReferenceConfiguration>`.
:paramtype data_references: dict[str, ~flow.models.DataReferenceConfiguration]
:keyword data: Dictionary of :code:`<Data>`.
:paramtype data: dict[str, ~flow.models.Data]
:keyword input_assets: Dictionary of :code:`<InputAsset>`.
:paramtype input_assets: dict[str, ~flow.models.InputAsset]
:keyword output_data: Dictionary of :code:`<OutputData>`.
:paramtype output_data: dict[str, ~flow.models.OutputData]
:keyword datacaches:
:paramtype datacaches: list[~flow.models.DatacacheConfiguration]
:keyword job_name:
:paramtype job_name: str
:keyword max_run_duration_seconds:
:paramtype max_run_duration_seconds: long
:keyword node_count:
:paramtype node_count: int
:keyword max_node_count:
:paramtype max_node_count: int
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword priority:
:paramtype priority: int
:keyword credential_passthrough:
:paramtype credential_passthrough: bool
:keyword identity:
:paramtype identity: ~flow.models.IdentityConfiguration
:keyword environment:
:paramtype environment: ~flow.models.EnvironmentDefinition
:keyword history:
:paramtype history: ~flow.models.HistoryConfiguration
:keyword spark:
:paramtype spark: ~flow.models.SparkConfiguration
:keyword parallel_task:
:paramtype parallel_task: ~flow.models.ParallelTaskConfiguration
:keyword tensorflow:
:paramtype tensorflow: ~flow.models.TensorflowConfiguration
:keyword mpi:
:paramtype mpi: ~flow.models.MpiConfiguration
:keyword py_torch:
:paramtype py_torch: ~flow.models.PyTorchConfiguration
:keyword ray:
:paramtype ray: ~flow.models.RayConfiguration
:keyword hdi:
:paramtype hdi: ~flow.models.HdiConfiguration
:keyword docker:
:paramtype docker: ~flow.models.DockerConfiguration
:keyword command_return_code_config:
:paramtype command_return_code_config: ~flow.models.CommandReturnCodeConfig
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword application_endpoints: Dictionary of :code:`<ApplicationEndpointConfiguration>`.
:paramtype application_endpoints: dict[str, ~flow.models.ApplicationEndpointConfiguration]
:keyword parameters:
:paramtype parameters: list[~flow.models.ParameterDefinition]
:keyword autologger_settings:
:paramtype autologger_settings: ~flow.models.AutologgerSettings
:keyword data_bricks:
:paramtype data_bricks: ~flow.models.DatabricksConfiguration
:keyword training_diagnostic_config:
:paramtype training_diagnostic_config: ~flow.models.TrainingDiagnosticConfiguration
:keyword secrets_configuration: Dictionary of :code:`<SecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.SecretConfiguration]
"""
super(RunConfiguration, self).__init__(**kwargs)
self.script = script
self.script_type = script_type
self.command = command
self.use_absolute_path = use_absolute_path
self.arguments = arguments
self.framework = framework
self.communicator = communicator
self.target = target
self.auto_cluster_compute_specification = auto_cluster_compute_specification
self.data_references = data_references
self.data = data
self.input_assets = input_assets
self.output_data = output_data
self.datacaches = datacaches
self.job_name = job_name
self.max_run_duration_seconds = max_run_duration_seconds
self.node_count = node_count
self.max_node_count = max_node_count
self.instance_types = instance_types
self.priority = priority
self.credential_passthrough = credential_passthrough
self.identity = identity
self.environment = environment
self.history = history
self.spark = spark
self.parallel_task = parallel_task
self.tensorflow = tensorflow
self.mpi = mpi
self.py_torch = py_torch
self.ray = ray
self.hdi = hdi
self.docker = docker
self.command_return_code_config = command_return_code_config
self.environment_variables = environment_variables
self.application_endpoints = application_endpoints
self.parameters = parameters
self.autologger_settings = autologger_settings
self.data_bricks = data_bricks
self.training_diagnostic_config = training_diagnostic_config
self.secrets_configuration = secrets_configuration
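# Illustrative sketch (not generated code): a minimal RunConfiguration. Every
# member is optional, and nested models (environment, docker, spark, ...) can
# simply be left unset. Script name and target are hypothetical.
def _example_run_configuration():
    cfg = RunConfiguration(
        script="train.py",
        arguments=["--epochs", "3"],
        target="cpu-cluster",
        environment_variables={"MY_FLAG": "1"},
        node_count=1,
    )
    return cfg.serialize()  # keys follow the camelCase in _attribute_map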
class RunDatasetReference(msrest.serialization.Model):
"""RunDatasetReference.
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword version:
:paramtype version: str
"""
super(RunDatasetReference, self).__init__(**kwargs)
self.id = id
self.name = name
self.version = version
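# Illustrative sketch (not generated code): from_dict() parses a plain dict
# into the model and is more permissive about key casing than deserialize().
def _example_run_dataset_reference():
    ref = RunDatasetReference.from_dict(
        {"id": "ds-1", "name": "train", "version": "2"}  # hypothetical values
    )
    return ref.name  # "train"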
class RunDefinition(msrest.serialization.Model):
"""RunDefinition.
:ivar configuration:
:vartype configuration: ~flow.models.RunConfiguration
:ivar snapshot_id:
:vartype snapshot_id: str
:ivar snapshots:
:vartype snapshots: list[~flow.models.Snapshot]
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar run_type:
:vartype run_type: str
:ivar display_name:
:vartype display_name: str
:ivar environment_asset_id:
:vartype environment_asset_id: str
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar description:
:vartype description: str
:ivar cancel_reason:
:vartype cancel_reason: str
:ivar properties: Dictionary of :code:`<string>`.
:vartype properties: dict[str, str]
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': 'RunConfiguration'},
'snapshot_id': {'key': 'snapshotId', 'type': 'str'},
'snapshots': {'key': 'snapshots', 'type': '[Snapshot]'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'environment_asset_id': {'key': 'environmentAssetId', 'type': 'str'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'cancel_reason': {'key': 'cancelReason', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
configuration: Optional["RunConfiguration"] = None,
snapshot_id: Optional[str] = None,
snapshots: Optional[List["Snapshot"]] = None,
parent_run_id: Optional[str] = None,
run_type: Optional[str] = None,
display_name: Optional[str] = None,
environment_asset_id: Optional[str] = None,
primary_metric_name: Optional[str] = None,
description: Optional[str] = None,
cancel_reason: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword configuration:
:paramtype configuration: ~flow.models.RunConfiguration
:keyword snapshot_id:
:paramtype snapshot_id: str
:keyword snapshots:
:paramtype snapshots: list[~flow.models.Snapshot]
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword run_type:
:paramtype run_type: str
:keyword display_name:
:paramtype display_name: str
:keyword environment_asset_id:
:paramtype environment_asset_id: str
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword description:
:paramtype description: str
:keyword cancel_reason:
:paramtype cancel_reason: str
:keyword properties: Dictionary of :code:`<string>`.
:paramtype properties: dict[str, str]
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
"""
super(RunDefinition, self).__init__(**kwargs)
self.configuration = configuration
self.snapshot_id = snapshot_id
self.snapshots = snapshots
self.parent_run_id = parent_run_id
self.run_type = run_type
self.display_name = display_name
self.environment_asset_id = environment_asset_id
self.primary_metric_name = primary_metric_name
self.description = description
self.cancel_reason = cancel_reason
self.properties = properties
self.tags = tags
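# Illustrative sketch (not generated code): RunDefinition embeds a
# RunConfiguration, so the nested model serializes under the "configuration"
# key. Script name and tags are hypothetical.
def _example_run_definition():
    definition = RunDefinition(
        configuration=RunConfiguration(script="train.py"),
        run_type="command",
        display_name="sample run",
        tags={"purpose": "demo"},
    )
    return definition.serialize()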
class RunDetailsDto(msrest.serialization.Model):
"""RunDetailsDto.
:ivar run_id:
:vartype run_id: str
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar root_run_uuid:
:vartype root_run_uuid: str
:ivar target:
:vartype target: str
:ivar status:
:vartype status: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar created_time_utc:
:vartype created_time_utc: ~datetime.datetime
:ivar start_time_utc:
:vartype start_time_utc: ~datetime.datetime
:ivar end_time_utc:
:vartype end_time_utc: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar warnings:
:vartype warnings: list[~flow.models.RunDetailsWarningDto]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar services: This is a dictionary.
:vartype services: dict[str, ~flow.models.EndpointSetting]
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar log_files: This is a dictionary.
:vartype log_files: dict[str, str]
:ivar job_cost:
:vartype job_cost: ~flow.models.JobCost
:ivar revision:
:vartype revision: long
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2
:ivar settings: This is a dictionary.
:vartype settings: dict[str, str]
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar created_by:
:vartype created_by: ~flow.models.User
:ivar compute_duration:
:vartype compute_duration: str
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar run_number:
:vartype run_number: int
:ivar root_run_id:
:vartype root_run_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar user_id:
:vartype user_id: str
:ivar status_revision:
:vartype status_revision: long
:ivar current_compute_time:
:vartype current_compute_time: str
:ivar last_start_time_utc:
:vartype last_start_time_utc: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.User
:ivar last_modified_utc:
:vartype last_modified_utc: ~datetime.datetime
:ivar duration:
:vartype duration: str
:ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype inputs: dict[str, ~flow.models.TypedAssetReference]
:ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype outputs: dict[str, ~flow.models.TypedAssetReference]
:ivar current_attempt_id:
:vartype current_attempt_id: int
"""
_validation = {
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'services': {'key': 'services', 'type': '{EndpointSetting}'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'log_files': {'key': 'logFiles', 'type': '{str}'},
'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
'revision': {'key': 'revision', 'type': 'long'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
'settings': {'key': 'settings', 'type': '{str}'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'created_by': {'key': 'createdBy', 'type': 'User'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'user_id': {'key': 'userId', 'type': 'str'},
'status_revision': {'key': 'statusRevision', 'type': 'long'},
'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
}
def __init__(
self,
*,
run_id: Optional[str] = None,
run_uuid: Optional[str] = None,
parent_run_uuid: Optional[str] = None,
root_run_uuid: Optional[str] = None,
target: Optional[str] = None,
status: Optional[str] = None,
parent_run_id: Optional[str] = None,
data_container_id: Optional[str] = None,
created_time_utc: Optional[datetime.datetime] = None,
start_time_utc: Optional[datetime.datetime] = None,
end_time_utc: Optional[datetime.datetime] = None,
error: Optional["ErrorResponse"] = None,
warnings: Optional[List["RunDetailsWarningDto"]] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
parameters: Optional[Dict[str, Any]] = None,
services: Optional[Dict[str, "EndpointSetting"]] = None,
input_datasets: Optional[List["DatasetLineage"]] = None,
output_datasets: Optional[List["OutputDatasetLineage"]] = None,
run_definition: Optional[Any] = None,
log_files: Optional[Dict[str, str]] = None,
job_cost: Optional["JobCost"] = None,
revision: Optional[int] = None,
run_type_v2: Optional["RunTypeV2"] = None,
settings: Optional[Dict[str, str]] = None,
compute_request: Optional["ComputeRequest"] = None,
compute: Optional["Compute"] = None,
created_by: Optional["User"] = None,
compute_duration: Optional[str] = None,
effective_start_time_utc: Optional[datetime.datetime] = None,
run_number: Optional[int] = None,
root_run_id: Optional[str] = None,
experiment_id: Optional[str] = None,
user_id: Optional[str] = None,
status_revision: Optional[int] = None,
current_compute_time: Optional[str] = None,
last_start_time_utc: Optional[datetime.datetime] = None,
last_modified_by: Optional["User"] = None,
last_modified_utc: Optional[datetime.datetime] = None,
duration: Optional[str] = None,
inputs: Optional[Dict[str, "TypedAssetReference"]] = None,
outputs: Optional[Dict[str, "TypedAssetReference"]] = None,
current_attempt_id: Optional[int] = None,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword root_run_uuid:
:paramtype root_run_uuid: str
:keyword target:
:paramtype target: str
:keyword status:
:paramtype status: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword created_time_utc:
:paramtype created_time_utc: ~datetime.datetime
:keyword start_time_utc:
:paramtype start_time_utc: ~datetime.datetime
:keyword end_time_utc:
:paramtype end_time_utc: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword warnings:
:paramtype warnings: list[~flow.models.RunDetailsWarningDto]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword services: This is a dictionary.
:paramtype services: dict[str, ~flow.models.EndpointSetting]
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword log_files: This is a dictionary.
:paramtype log_files: dict[str, str]
:keyword job_cost:
:paramtype job_cost: ~flow.models.JobCost
:keyword revision:
:paramtype revision: long
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2
:keyword settings: This is a dictionary.
:paramtype settings: dict[str, str]
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword created_by:
:paramtype created_by: ~flow.models.User
:keyword compute_duration:
:paramtype compute_duration: str
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword run_number:
:paramtype run_number: int
:keyword root_run_id:
:paramtype root_run_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword user_id:
:paramtype user_id: str
:keyword status_revision:
:paramtype status_revision: long
:keyword current_compute_time:
:paramtype current_compute_time: str
:keyword last_start_time_utc:
:paramtype last_start_time_utc: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.User
:keyword last_modified_utc:
:paramtype last_modified_utc: ~datetime.datetime
:keyword duration:
:paramtype duration: str
:keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
:keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
:keyword current_attempt_id:
:paramtype current_attempt_id: int
"""
super(RunDetailsDto, self).__init__(**kwargs)
self.run_id = run_id
self.run_uuid = run_uuid
self.parent_run_uuid = parent_run_uuid
self.root_run_uuid = root_run_uuid
self.target = target
self.status = status
self.parent_run_id = parent_run_id
self.data_container_id = data_container_id
self.created_time_utc = created_time_utc
self.start_time_utc = start_time_utc
self.end_time_utc = end_time_utc
self.error = error
self.warnings = warnings
self.tags = tags
self.properties = properties
self.parameters = parameters
self.services = services
self.input_datasets = input_datasets
self.output_datasets = output_datasets
self.run_definition = run_definition
self.log_files = log_files
self.job_cost = job_cost
self.revision = revision
self.run_type_v2 = run_type_v2
self.settings = settings
self.compute_request = compute_request
self.compute = compute
self.created_by = created_by
self.compute_duration = compute_duration
self.effective_start_time_utc = effective_start_time_utc
self.run_number = run_number
self.root_run_id = root_run_id
self.experiment_id = experiment_id
self.user_id = user_id
self.status_revision = status_revision
self.current_compute_time = current_compute_time
self.last_start_time_utc = last_start_time_utc
self.last_modified_by = last_modified_by
self.last_modified_utc = last_modified_utc
self.duration = duration
self.inputs = inputs
self.outputs = outputs
self.current_attempt_id = current_attempt_id
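# Illustrative sketch (not generated code): deserializing a service-style
# payload into RunDetailsDto. Keys must use the REST casing from
# _attribute_map, and 'iso-8601' fields are parsed into datetime objects.
# The payload values are hypothetical.
def _example_run_details_from_payload():
    payload = {
        "runId": "run-001",
        "status": "Completed",
        "createdTimeUtc": "2023-01-01T00:00:00Z",
        "tags": {"owner": "team-a"},
    }
    details = RunDetailsDto.deserialize(payload)
    return details.run_id, details.created_time_utc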
class RunDetailsWarningDto(msrest.serialization.Model):
"""RunDetailsWarningDto.
:ivar source:
:vartype source: str
:ivar message:
:vartype message: str
"""
_attribute_map = {
'source': {'key': 'source', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
source: Optional[str] = None,
message: Optional[str] = None,
**kwargs
):
"""
:keyword source:
:paramtype source: str
:keyword message:
:paramtype message: str
"""
super(RunDetailsWarningDto, self).__init__(**kwargs)
self.source = source
self.message = message
class RunDto(msrest.serialization.Model):
"""RunDto.
:ivar run_number:
:vartype run_number: int
:ivar root_run_id:
:vartype root_run_id: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar created_by:
:vartype created_by: ~flow.models.User
:ivar user_id:
:vartype user_id: str
:ivar token:
:vartype token: str
:ivar token_expiry_time_utc:
:vartype token_expiry_time_utc: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar warnings:
:vartype warnings: list[~flow.models.RunDetailsWarningDto]
:ivar revision:
:vartype revision: long
:ivar status_revision:
:vartype status_revision: long
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar root_run_uuid:
:vartype root_run_uuid: str
:ivar last_start_time_utc:
:vartype last_start_time_utc: ~datetime.datetime
:ivar current_compute_time:
:vartype current_compute_time: str
:ivar compute_duration:
:vartype compute_duration: str
:ivar effective_start_time_utc:
:vartype effective_start_time_utc: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: ~flow.models.User
:ivar last_modified_utc:
:vartype last_modified_utc: ~datetime.datetime
:ivar duration:
:vartype duration: str
:ivar cancelation_reason:
:vartype cancelation_reason: str
:ivar current_attempt_id:
:vartype current_attempt_id: int
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar status:
:vartype status: str
:ivar start_time_utc:
:vartype start_time_utc: ~datetime.datetime
:ivar end_time_utc:
:vartype end_time_utc: ~datetime.datetime
:ivar schedule_id:
:vartype schedule_id: str
:ivar display_name:
:vartype display_name: str
:ivar name:
:vartype name: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar description:
:vartype description: str
:ivar hidden:
:vartype hidden: bool
:ivar run_type:
:vartype run_type: str
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar parameters: Dictionary of :code:`<any>`.
:vartype parameters: dict[str, any]
:ivar action_uris: Dictionary of :code:`<string>`.
:vartype action_uris: dict[str, str]
:ivar script_name:
:vartype script_name: str
:ivar target:
:vartype target: str
:ivar unique_child_run_compute_targets:
:vartype unique_child_run_compute_targets: list[str]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar settings: Dictionary of :code:`<string>`.
:vartype settings: dict[str, str]
:ivar services: Dictionary of :code:`<EndpointSetting>`.
:vartype services: dict[str, ~flow.models.EndpointSetting]
:ivar input_datasets:
:vartype input_datasets: list[~flow.models.DatasetLineage]
:ivar output_datasets:
:vartype output_datasets: list[~flow.models.OutputDatasetLineage]
:ivar run_definition: Anything.
:vartype run_definition: any
:ivar job_specification: Anything.
:vartype job_specification: any
:ivar primary_metric_name:
:vartype primary_metric_name: str
:ivar created_from:
:vartype created_from: ~flow.models.CreatedFromDto
:ivar cancel_uri:
:vartype cancel_uri: str
:ivar complete_uri:
:vartype complete_uri: str
:ivar diagnostics_uri:
:vartype diagnostics_uri: str
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar retain_for_lifetime_of_workspace:
:vartype retain_for_lifetime_of_workspace: bool
:ivar queueing_info:
:vartype queueing_info: ~flow.models.QueueingInfo
:ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype inputs: dict[str, ~flow.models.TypedAssetReference]
:ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
:vartype outputs: dict[str, ~flow.models.TypedAssetReference]
"""
_validation = {
'unique_child_run_compute_targets': {'unique': True},
'input_datasets': {'unique': True},
'output_datasets': {'unique': True},
}
_attribute_map = {
'run_number': {'key': 'runNumber', 'type': 'int'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'User'},
'user_id': {'key': 'userId', 'type': 'str'},
'token': {'key': 'token', 'type': 'str'},
'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'warnings': {'key': 'warnings', 'type': '[RunDetailsWarningDto]'},
'revision': {'key': 'revision', 'type': 'long'},
'status_revision': {'key': 'statusRevision', 'type': 'long'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
'compute_duration': {'key': 'computeDuration', 'type': 'str'},
'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'str'},
'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'},
'current_attempt_id': {'key': 'currentAttemptId', 'type': 'int'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
'schedule_id': {'key': 'scheduleId', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'hidden': {'key': 'hidden', 'type': 'bool'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
'properties': {'key': 'properties', 'type': '{str}'},
'parameters': {'key': 'parameters', 'type': '{object}'},
'action_uris': {'key': 'actionUris', 'type': '{str}'},
'script_name': {'key': 'scriptName', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
'tags': {'key': 'tags', 'type': '{str}'},
'settings': {'key': 'settings', 'type': '{str}'},
'services': {'key': 'services', 'type': '{EndpointSetting}'},
'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
'run_definition': {'key': 'runDefinition', 'type': 'object'},
'job_specification': {'key': 'jobSpecification', 'type': 'object'},
'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
'created_from': {'key': 'createdFrom', 'type': 'CreatedFromDto'},
'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
'complete_uri': {'key': 'completeUri', 'type': 'str'},
'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
}
def __init__(
self,
*,
run_number: Optional[int] = None,
root_run_id: Optional[str] = None,
created_utc: Optional[datetime.datetime] = None,
created_by: Optional["User"] = None,
user_id: Optional[str] = None,
token: Optional[str] = None,
token_expiry_time_utc: Optional[datetime.datetime] = None,
error: Optional["ErrorResponse"] = None,
warnings: Optional[List["RunDetailsWarningDto"]] = None,
revision: Optional[int] = None,
status_revision: Optional[int] = None,
run_uuid: Optional[str] = None,
parent_run_uuid: Optional[str] = None,
root_run_uuid: Optional[str] = None,
last_start_time_utc: Optional[datetime.datetime] = None,
current_compute_time: Optional[str] = None,
compute_duration: Optional[str] = None,
effective_start_time_utc: Optional[datetime.datetime] = None,
last_modified_by: Optional["User"] = None,
last_modified_utc: Optional[datetime.datetime] = None,
duration: Optional[str] = None,
cancelation_reason: Optional[str] = None,
current_attempt_id: Optional[int] = None,
run_id: Optional[str] = None,
parent_run_id: Optional[str] = None,
experiment_id: Optional[str] = None,
status: Optional[str] = None,
start_time_utc: Optional[datetime.datetime] = None,
end_time_utc: Optional[datetime.datetime] = None,
schedule_id: Optional[str] = None,
display_name: Optional[str] = None,
name: Optional[str] = None,
data_container_id: Optional[str] = None,
description: Optional[str] = None,
hidden: Optional[bool] = None,
run_type: Optional[str] = None,
run_type_v2: Optional["RunTypeV2"] = None,
properties: Optional[Dict[str, str]] = None,
parameters: Optional[Dict[str, Any]] = None,
action_uris: Optional[Dict[str, str]] = None,
script_name: Optional[str] = None,
target: Optional[str] = None,
unique_child_run_compute_targets: Optional[List[str]] = None,
tags: Optional[Dict[str, str]] = None,
settings: Optional[Dict[str, str]] = None,
services: Optional[Dict[str, "EndpointSetting"]] = None,
input_datasets: Optional[List["DatasetLineage"]] = None,
output_datasets: Optional[List["OutputDatasetLineage"]] = None,
run_definition: Optional[Any] = None,
job_specification: Optional[Any] = None,
primary_metric_name: Optional[str] = None,
created_from: Optional["CreatedFromDto"] = None,
cancel_uri: Optional[str] = None,
complete_uri: Optional[str] = None,
diagnostics_uri: Optional[str] = None,
compute_request: Optional["ComputeRequest"] = None,
compute: Optional["Compute"] = None,
retain_for_lifetime_of_workspace: Optional[bool] = None,
queueing_info: Optional["QueueingInfo"] = None,
inputs: Optional[Dict[str, "TypedAssetReference"]] = None,
outputs: Optional[Dict[str, "TypedAssetReference"]] = None,
**kwargs
):
"""
:keyword run_number:
:paramtype run_number: int
:keyword root_run_id:
:paramtype root_run_id: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword created_by:
:paramtype created_by: ~flow.models.User
:keyword user_id:
:paramtype user_id: str
:keyword token:
:paramtype token: str
:keyword token_expiry_time_utc:
:paramtype token_expiry_time_utc: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword warnings:
:paramtype warnings: list[~flow.models.RunDetailsWarningDto]
:keyword revision:
:paramtype revision: long
:keyword status_revision:
:paramtype status_revision: long
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword root_run_uuid:
:paramtype root_run_uuid: str
:keyword last_start_time_utc:
:paramtype last_start_time_utc: ~datetime.datetime
:keyword current_compute_time:
:paramtype current_compute_time: str
:keyword compute_duration:
:paramtype compute_duration: str
:keyword effective_start_time_utc:
:paramtype effective_start_time_utc: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: ~flow.models.User
:keyword last_modified_utc:
:paramtype last_modified_utc: ~datetime.datetime
:keyword duration:
:paramtype duration: str
:keyword cancelation_reason:
:paramtype cancelation_reason: str
:keyword current_attempt_id:
:paramtype current_attempt_id: int
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword status:
:paramtype status: str
:keyword start_time_utc:
:paramtype start_time_utc: ~datetime.datetime
:keyword end_time_utc:
:paramtype end_time_utc: ~datetime.datetime
:keyword schedule_id:
:paramtype schedule_id: str
:keyword display_name:
:paramtype display_name: str
:keyword name:
:paramtype name: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword description:
:paramtype description: str
:keyword hidden:
:paramtype hidden: bool
:keyword run_type:
:paramtype run_type: str
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword parameters: Dictionary of :code:`<any>`.
:paramtype parameters: dict[str, any]
:keyword action_uris: Dictionary of :code:`<string>`.
:paramtype action_uris: dict[str, str]
:keyword script_name:
:paramtype script_name: str
:keyword target:
:paramtype target: str
:keyword unique_child_run_compute_targets:
:paramtype unique_child_run_compute_targets: list[str]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword settings: Dictionary of :code:`<string>`.
:paramtype settings: dict[str, str]
:keyword services: Dictionary of :code:`<EndpointSetting>`.
:paramtype services: dict[str, ~flow.models.EndpointSetting]
:keyword input_datasets:
:paramtype input_datasets: list[~flow.models.DatasetLineage]
:keyword output_datasets:
:paramtype output_datasets: list[~flow.models.OutputDatasetLineage]
:keyword run_definition: Anything.
:paramtype run_definition: any
:keyword job_specification: Anything.
:paramtype job_specification: any
:keyword primary_metric_name:
:paramtype primary_metric_name: str
:keyword created_from:
:paramtype created_from: ~flow.models.CreatedFromDto
:keyword cancel_uri:
:paramtype cancel_uri: str
:keyword complete_uri:
:paramtype complete_uri: str
:keyword diagnostics_uri:
:paramtype diagnostics_uri: str
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword retain_for_lifetime_of_workspace:
:paramtype retain_for_lifetime_of_workspace: bool
:keyword queueing_info:
:paramtype queueing_info: ~flow.models.QueueingInfo
:keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype inputs: dict[str, ~flow.models.TypedAssetReference]
:keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
:paramtype outputs: dict[str, ~flow.models.TypedAssetReference]
"""
super(RunDto, self).__init__(**kwargs)
self.run_number = run_number
self.root_run_id = root_run_id
self.created_utc = created_utc
self.created_by = created_by
self.user_id = user_id
self.token = token
self.token_expiry_time_utc = token_expiry_time_utc
self.error = error
self.warnings = warnings
self.revision = revision
self.status_revision = status_revision
self.run_uuid = run_uuid
self.parent_run_uuid = parent_run_uuid
self.root_run_uuid = root_run_uuid
self.last_start_time_utc = last_start_time_utc
self.current_compute_time = current_compute_time
self.compute_duration = compute_duration
self.effective_start_time_utc = effective_start_time_utc
self.last_modified_by = last_modified_by
self.last_modified_utc = last_modified_utc
self.duration = duration
self.cancelation_reason = cancelation_reason
self.current_attempt_id = current_attempt_id
self.run_id = run_id
self.parent_run_id = parent_run_id
self.experiment_id = experiment_id
self.status = status
self.start_time_utc = start_time_utc
self.end_time_utc = end_time_utc
self.schedule_id = schedule_id
self.display_name = display_name
self.name = name
self.data_container_id = data_container_id
self.description = description
self.hidden = hidden
self.run_type = run_type
self.run_type_v2 = run_type_v2
self.properties = properties
self.parameters = parameters
self.action_uris = action_uris
self.script_name = script_name
self.target = target
self.unique_child_run_compute_targets = unique_child_run_compute_targets
self.tags = tags
self.settings = settings
self.services = services
self.input_datasets = input_datasets
self.output_datasets = output_datasets
self.run_definition = run_definition
self.job_specification = job_specification
self.primary_metric_name = primary_metric_name
self.created_from = created_from
self.cancel_uri = cancel_uri
self.complete_uri = complete_uri
self.diagnostics_uri = diagnostics_uri
self.compute_request = compute_request
self.compute = compute
self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
self.queueing_info = queueing_info
self.inputs = inputs
self.outputs = outputs
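

# Illustrative sketch only (all values below are invented): RunDto, like every
# model in this module, inherits serialize()/deserialize() from
# msrest.serialization.Model, which maps the snake_case attributes to the
# camelCase wire keys declared in _attribute_map and drops None-valued fields.
#
#     dto = RunDto(run_id="example-run", status="Completed", run_number=7,
#                  tags={"owner": "example"})
#     wire = dto.serialize()             # e.g. {'runId': 'example-run', ...}
#     same = RunDto.deserialize(wire)    # round-trips back to a RunDto

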
class RunIndexEntity(msrest.serialization.Model):
"""RunIndexEntity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar schema_id:
:vartype schema_id: str
:ivar entity_id:
:vartype entity_id: str
:ivar kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:vartype kind: str or ~flow.models.EntityKind
:ivar annotations:
:vartype annotations: ~flow.models.RunAnnotations
:ivar properties:
:vartype properties: ~flow.models.RunProperties
:ivar internal: Any object.
:vartype internal: any
:ivar update_sequence:
:vartype update_sequence: long
:ivar type:
:vartype type: str
:ivar version:
:vartype version: str
:ivar entity_container_id:
:vartype entity_container_id: str
:ivar entity_object_id:
:vartype entity_object_id: str
:ivar resource_type:
:vartype resource_type: str
:ivar relationships:
:vartype relationships: list[~flow.models.Relationship]
:ivar asset_id:
:vartype asset_id: str
"""
_validation = {
'version': {'readonly': True},
'entity_container_id': {'readonly': True},
'entity_object_id': {'readonly': True},
'resource_type': {'readonly': True},
}
_attribute_map = {
'schema_id': {'key': 'schemaId', 'type': 'str'},
'entity_id': {'key': 'entityId', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'annotations': {'key': 'annotations', 'type': 'RunAnnotations'},
'properties': {'key': 'properties', 'type': 'RunProperties'},
'internal': {'key': 'internal', 'type': 'object'},
'update_sequence': {'key': 'updateSequence', 'type': 'long'},
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'entity_container_id': {'key': 'entityContainerId', 'type': 'str'},
'entity_object_id': {'key': 'entityObjectId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'relationships': {'key': 'relationships', 'type': '[Relationship]'},
'asset_id': {'key': 'assetId', 'type': 'str'},
}
def __init__(
self,
*,
schema_id: Optional[str] = None,
entity_id: Optional[str] = None,
kind: Optional[Union[str, "EntityKind"]] = None,
annotations: Optional["RunAnnotations"] = None,
properties: Optional["RunProperties"] = None,
internal: Optional[Any] = None,
update_sequence: Optional[int] = None,
type: Optional[str] = None,
relationships: Optional[List["Relationship"]] = None,
asset_id: Optional[str] = None,
**kwargs
):
"""
:keyword schema_id:
:paramtype schema_id: str
:keyword entity_id:
:paramtype entity_id: str
:keyword kind: Possible values include: "Invalid", "LineageRoot", "Versioned", "Unversioned".
:paramtype kind: str or ~flow.models.EntityKind
:keyword annotations:
:paramtype annotations: ~flow.models.RunAnnotations
:keyword properties:
:paramtype properties: ~flow.models.RunProperties
:keyword internal: Any object.
:paramtype internal: any
:keyword update_sequence:
:paramtype update_sequence: long
:keyword type:
:paramtype type: str
:keyword relationships:
:paramtype relationships: list[~flow.models.Relationship]
:keyword asset_id:
:paramtype asset_id: str
"""
super(RunIndexEntity, self).__init__(**kwargs)
self.schema_id = schema_id
self.entity_id = entity_id
self.kind = kind
self.annotations = annotations
self.properties = properties
self.internal = internal
self.update_sequence = update_sequence
self.type = type
self.version = None
self.entity_container_id = None
self.entity_object_id = None
self.resource_type = None
self.relationships = relationships
self.asset_id = asset_id
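

# Illustrative sketch (values invented): the fields marked readonly in
# _validation (version, entity_container_id, entity_object_id, resource_type)
# are server-populated and initialized to None above; msrest omits them from
# serialize() output unless keep_readonly=True is passed, and fills them in
# when deserializing a service response.
#
#     entity = RunIndexEntity(entity_id="example-entity", kind="Versioned")
#     payload = entity.serialize()       # no 'version' key in the payload

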
class RunIndexMetricSummary(msrest.serialization.Model):
"""RunIndexMetricSummary.
:ivar count:
:vartype count: long
:ivar last_value: Anything.
:vartype last_value: any
:ivar minimum_value: Anything.
:vartype minimum_value: any
:ivar maximum_value: Anything.
:vartype maximum_value: any
:ivar metric_type:
:vartype metric_type: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'long'},
'last_value': {'key': 'lastValue', 'type': 'object'},
'minimum_value': {'key': 'minimumValue', 'type': 'object'},
'maximum_value': {'key': 'maximumValue', 'type': 'object'},
'metric_type': {'key': 'metricType', 'type': 'str'},
}
def __init__(
self,
*,
count: Optional[int] = None,
last_value: Optional[Any] = None,
minimum_value: Optional[Any] = None,
maximum_value: Optional[Any] = None,
metric_type: Optional[str] = None,
**kwargs
):
"""
:keyword count:
:paramtype count: long
:keyword last_value: Anything.
:paramtype last_value: any
:keyword minimum_value: Anything.
:paramtype minimum_value: any
:keyword maximum_value: Anything.
:paramtype maximum_value: any
:keyword metric_type:
:paramtype metric_type: str
"""
super(RunIndexMetricSummary, self).__init__(**kwargs)
self.count = count
self.last_value = last_value
self.minimum_value = minimum_value
self.maximum_value = maximum_value
self.metric_type = metric_type
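

# Illustrative sketch (numbers invented): the last/min/max values are typed
# 'object' on the wire, so any JSON-serializable value round-trips unchanged.
#
#     summary = RunIndexMetricSummary(count=3, last_value=0.93,
#                                     minimum_value=0.88, maximum_value=0.95,
#                                     metric_type="example-scalar")
#     summary.serialize()    # {'count': 3, 'lastValue': 0.93, ...}

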
class RunIndexMetricSummarySystemObject(msrest.serialization.Model):
"""RunIndexMetricSummarySystemObject.
:ivar count:
:vartype count: long
:ivar last_value: Anything.
:vartype last_value: any
:ivar minimum_value: Anything.
:vartype minimum_value: any
:ivar maximum_value: Anything.
:vartype maximum_value: any
:ivar metric_type:
:vartype metric_type: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'long'},
'last_value': {'key': 'lastValue', 'type': 'object'},
'minimum_value': {'key': 'minimumValue', 'type': 'object'},
'maximum_value': {'key': 'maximumValue', 'type': 'object'},
'metric_type': {'key': 'metricType', 'type': 'str'},
}
def __init__(
self,
*,
count: Optional[int] = None,
last_value: Optional[Any] = None,
minimum_value: Optional[Any] = None,
maximum_value: Optional[Any] = None,
metric_type: Optional[str] = None,
**kwargs
):
"""
:keyword count:
:paramtype count: long
:keyword last_value: Anything.
:paramtype last_value: any
:keyword minimum_value: Anything.
:paramtype minimum_value: any
:keyword maximum_value: Anything.
:paramtype maximum_value: any
:keyword metric_type:
:paramtype metric_type: str
"""
super(RunIndexMetricSummarySystemObject, self).__init__(**kwargs)
self.count = count
self.last_value = last_value
self.minimum_value = minimum_value
self.maximum_value = maximum_value
self.metric_type = metric_type


class RunIndexResourceMetricSummary(msrest.serialization.Model):
"""RunIndexResourceMetricSummary.
:ivar gpu_utilization_percent_last_hour:
:vartype gpu_utilization_percent_last_hour: float
:ivar gpu_memory_utilization_percent_last_hour:
:vartype gpu_memory_utilization_percent_last_hour: float
:ivar gpu_energy_joules:
:vartype gpu_energy_joules: float
:ivar resource_metric_names:
:vartype resource_metric_names: list[str]
"""
_attribute_map = {
'gpu_utilization_percent_last_hour': {'key': 'gpuUtilizationPercentLastHour', 'type': 'float'},
'gpu_memory_utilization_percent_last_hour': {'key': 'gpuMemoryUtilizationPercentLastHour', 'type': 'float'},
'gpu_energy_joules': {'key': 'gpuEnergyJoules', 'type': 'float'},
'resource_metric_names': {'key': 'resourceMetricNames', 'type': '[str]'},
}
def __init__(
self,
*,
gpu_utilization_percent_last_hour: Optional[float] = None,
gpu_memory_utilization_percent_last_hour: Optional[float] = None,
gpu_energy_joules: Optional[float] = None,
resource_metric_names: Optional[List[str]] = None,
**kwargs
):
"""
:keyword gpu_utilization_percent_last_hour:
:paramtype gpu_utilization_percent_last_hour: float
:keyword gpu_memory_utilization_percent_last_hour:
:paramtype gpu_memory_utilization_percent_last_hour: float
:keyword gpu_energy_joules:
:paramtype gpu_energy_joules: float
:keyword resource_metric_names:
:paramtype resource_metric_names: list[str]
"""
super(RunIndexResourceMetricSummary, self).__init__(**kwargs)
self.gpu_utilization_percent_last_hour = gpu_utilization_percent_last_hour
self.gpu_memory_utilization_percent_last_hour = gpu_memory_utilization_percent_last_hour
self.gpu_energy_joules = gpu_energy_joules
self.resource_metric_names = resource_metric_names
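

# Illustrative sketch (readings invented): as_dict() from msrest returns
# snake_case keys for local inspection, while serialize() returns the
# camelCase wire form.
#
#     gpu = RunIndexResourceMetricSummary(
#         gpu_utilization_percent_last_hour=72.5,
#         gpu_energy_joules=1.2e5,
#         resource_metric_names=["gpuUtilizationPercentLastHour"])
#     gpu.as_dict()      # {'gpu_utilization_percent_last_hour': 72.5, ...}
#     gpu.serialize()    # {'gpuUtilizationPercentLastHour': 72.5, ...}

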
class RunMetricDto(msrest.serialization.Model):
"""RunMetricDto.
:ivar run_id:
:vartype run_id: str
:ivar metric_id:
:vartype metric_id: str
:ivar data_container_id:
:vartype data_container_id: str
:ivar metric_type:
:vartype metric_type: str
:ivar created_utc:
:vartype created_utc: ~datetime.datetime
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar label:
:vartype label: str
:ivar num_cells:
:vartype num_cells: int
:ivar data_location:
:vartype data_location: str
:ivar cells:
:vartype cells: list[dict[str, any]]
:ivar schema:
:vartype schema: ~flow.models.MetricSchemaDto
"""
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
'metric_id': {'key': 'metricId', 'type': 'str'},
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'metric_type': {'key': 'metricType', 'type': 'str'},
'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'num_cells': {'key': 'numCells', 'type': 'int'},
'data_location': {'key': 'dataLocation', 'type': 'str'},
'cells': {'key': 'cells', 'type': '[{object}]'},
'schema': {'key': 'schema', 'type': 'MetricSchemaDto'},
}
def __init__(
self,
*,
run_id: Optional[str] = None,
metric_id: Optional[str] = None,
data_container_id: Optional[str] = None,
metric_type: Optional[str] = None,
created_utc: Optional[datetime.datetime] = None,
name: Optional[str] = None,
description: Optional[str] = None,
label: Optional[str] = None,
num_cells: Optional[int] = None,
data_location: Optional[str] = None,
cells: Optional[List[Dict[str, Any]]] = None,
schema: Optional["MetricSchemaDto"] = None,
**kwargs
):
"""
:keyword run_id:
:paramtype run_id: str
:keyword metric_id:
:paramtype metric_id: str
:keyword data_container_id:
:paramtype data_container_id: str
:keyword metric_type:
:paramtype metric_type: str
:keyword created_utc:
:paramtype created_utc: ~datetime.datetime
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword label:
:paramtype label: str
:keyword num_cells:
:paramtype num_cells: int
:keyword data_location:
:paramtype data_location: str
:keyword cells:
:paramtype cells: list[dict[str, any]]
:keyword schema:
:paramtype schema: ~flow.models.MetricSchemaDto
"""
super(RunMetricDto, self).__init__(**kwargs)
self.run_id = run_id
self.metric_id = metric_id
self.data_container_id = data_container_id
self.metric_type = metric_type
self.created_utc = created_utc
self.name = name
self.description = description
self.label = label
self.num_cells = num_cells
self.data_location = data_location
self.cells = cells
self.schema = schema
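

# Illustrative sketch (values invented): 'cells' is serialized as '[{object}]',
# i.e. a list of free-form dicts, and created_utc uses the iso-8601 serializer,
# so a timezone-aware datetime becomes an ISO string on the wire.
#
#     metric = RunMetricDto(
#         run_id="example-run", name="accuracy", metric_type="scalar",
#         created_utc=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
#         cells=[{"value": 0.90, "step": 1}, {"value": 0.92, "step": 2}],
#         num_cells=2)
#     metric.serialize()["createdUtc"]   # ISO-8601 string, e.g. '2024-01-01T00:00:00.000Z'

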
class RunMetricsTypesDto(msrest.serialization.Model):
"""RunMetricsTypesDto.
:ivar name:
:vartype name: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type:
:paramtype type: str
"""
super(RunMetricsTypesDto, self).__init__(**kwargs)
self.name = name
self.type = type


class RunProperties(msrest.serialization.Model):
"""RunProperties.
:ivar data_container_id:
:vartype data_container_id: str
:ivar target_name:
:vartype target_name: str
:ivar run_name:
:vartype run_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar root_run_id:
:vartype root_run_id: str
:ivar run_type:
:vartype run_type: str
:ivar run_type_v2:
:vartype run_type_v2: ~flow.models.RunTypeV2Index
:ivar script_name:
:vartype script_name: str
:ivar experiment_id:
:vartype experiment_id: str
:ivar run_uuid:
:vartype run_uuid: str
:ivar parent_run_uuid:
:vartype parent_run_uuid: str
:ivar run_number:
:vartype run_number: int
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar compute_request:
:vartype compute_request: ~flow.models.ComputeRequest
:ivar compute:
:vartype compute: ~flow.models.Compute
:ivar user_properties: This is a dictionary.
:vartype user_properties: dict[str, str]
:ivar action_uris: This is a dictionary.
:vartype action_uris: dict[str, str]
:ivar duration:
:vartype duration: str
:ivar duration_milliseconds:
:vartype duration_milliseconds: float
:ivar creation_context:
:vartype creation_context: ~flow.models.CreationContext
"""
_attribute_map = {
'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
'target_name': {'key': 'targetName', 'type': 'str'},
'run_name': {'key': 'runName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'root_run_id': {'key': 'rootRunId', 'type': 'str'},
'run_type': {'key': 'runType', 'type': 'str'},
'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2Index'},
'script_name': {'key': 'scriptName', 'type': 'str'},
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'run_uuid': {'key': 'runUuid', 'type': 'str'},
'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
'run_number': {'key': 'runNumber', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
'compute': {'key': 'compute', 'type': 'Compute'},
'user_properties': {'key': 'userProperties', 'type': '{str}'},
'action_uris': {'key': 'actionUris', 'type': '{str}'},
'duration': {'key': 'duration', 'type': 'str'},
'duration_milliseconds': {'key': 'durationMilliseconds', 'type': 'float'},
'creation_context': {'key': 'creationContext', 'type': 'CreationContext'},
}
def __init__(
self,
*,
data_container_id: Optional[str] = None,
target_name: Optional[str] = None,
run_name: Optional[str] = None,
experiment_name: Optional[str] = None,
run_id: Optional[str] = None,
parent_run_id: Optional[str] = None,
root_run_id: Optional[str] = None,
run_type: Optional[str] = None,
run_type_v2: Optional["RunTypeV2Index"] = None,
script_name: Optional[str] = None,
experiment_id: Optional[str] = None,
run_uuid: Optional[str] = None,
parent_run_uuid: Optional[str] = None,
run_number: Optional[int] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
compute_request: Optional["ComputeRequest"] = None,
compute: Optional["Compute"] = None,
user_properties: Optional[Dict[str, str]] = None,
action_uris: Optional[Dict[str, str]] = None,
duration: Optional[str] = None,
duration_milliseconds: Optional[float] = None,
creation_context: Optional["CreationContext"] = None,
**kwargs
):
"""
:keyword data_container_id:
:paramtype data_container_id: str
:keyword target_name:
:paramtype target_name: str
:keyword run_name:
:paramtype run_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword root_run_id:
:paramtype root_run_id: str
:keyword run_type:
:paramtype run_type: str
:keyword run_type_v2:
:paramtype run_type_v2: ~flow.models.RunTypeV2Index
:keyword script_name:
:paramtype script_name: str
:keyword experiment_id:
:paramtype experiment_id: str
:keyword run_uuid:
:paramtype run_uuid: str
:keyword parent_run_uuid:
:paramtype parent_run_uuid: str
:keyword run_number:
:paramtype run_number: int
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword compute_request:
:paramtype compute_request: ~flow.models.ComputeRequest
:keyword compute:
:paramtype compute: ~flow.models.Compute
:keyword user_properties: This is a dictionary.
:paramtype user_properties: dict[str, str]
:keyword action_uris: This is a dictionary.
:paramtype action_uris: dict[str, str]
:keyword duration:
:paramtype duration: str
:keyword duration_milliseconds:
:paramtype duration_milliseconds: float
:keyword creation_context:
:paramtype creation_context: ~flow.models.CreationContext
"""
super(RunProperties, self).__init__(**kwargs)
self.data_container_id = data_container_id
self.target_name = target_name
self.run_name = run_name
self.experiment_name = experiment_name
self.run_id = run_id
self.parent_run_id = parent_run_id
self.root_run_id = root_run_id
self.run_type = run_type
self.run_type_v2 = run_type_v2
self.script_name = script_name
self.experiment_id = experiment_id
self.run_uuid = run_uuid
self.parent_run_uuid = parent_run_uuid
self.run_number = run_number
self.start_time = start_time
self.end_time = end_time
self.compute_request = compute_request
self.compute = compute
self.user_properties = user_properties
self.action_uris = action_uris
self.duration = duration
self.duration_milliseconds = duration_milliseconds
self.creation_context = creation_context
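

# Illustrative sketch (values invented): deserialize() accepts the camelCase
# JSON shape the service returns and exposes it via snake_case attributes.
#
#     doc = {"runId": "example-run", "experimentName": "example-exp",
#            "durationMilliseconds": 1234.0}
#     props = RunProperties.deserialize(doc)
#     props.experiment_name    # 'example-exp'

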
class RunSettingParameter(msrest.serialization.Model):
"""RunSettingParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool", "String",
"JsonString", "YamlString", "StringList".
:vartype parameter_type: str or ~flow.models.RunSettingParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar description:
:vartype description: str
:ivar run_setting_ui_hint:
:vartype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
:ivar argument_name:
:vartype argument_name: str
:ivar section_name:
:vartype section_name: str
:ivar section_description:
:vartype section_description: str
:ivar section_argument_name:
:vartype section_argument_name: str
:ivar examples:
:vartype examples: list[str]
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar disabled_by_parameters:
:vartype disabled_by_parameters: list[str]
:ivar module_run_setting_type: Possible values include: "All", "Released", "Default",
"Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
:vartype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
:ivar linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
:vartype linked_parameter_default_value_mapping: dict[str, str]
:ivar linked_parameter_key_name:
:vartype linked_parameter_key_name: str
:ivar support_link_setting:
:vartype support_link_setting: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'run_setting_ui_hint': {'key': 'runSettingUIHint', 'type': 'RunSettingUIParameterHint'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
'section_name': {'key': 'sectionName', 'type': 'str'},
'section_description': {'key': 'sectionDescription', 'type': 'str'},
'section_argument_name': {'key': 'sectionArgumentName', 'type': 'str'},
'examples': {'key': 'examples', 'type': '[str]'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'disabled_by_parameters': {'key': 'disabledByParameters', 'type': '[str]'},
'module_run_setting_type': {'key': 'moduleRunSettingType', 'type': 'str'},
'linked_parameter_default_value_mapping': {'key': 'linkedParameterDefaultValueMapping', 'type': '{str}'},
'linked_parameter_key_name': {'key': 'linkedParameterKeyName', 'type': 'str'},
'support_link_setting': {'key': 'supportLinkSetting', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
parameter_type: Optional[Union[str, "RunSettingParameterType"]] = None,
is_optional: Optional[bool] = None,
default_value: Optional[str] = None,
lower_bound: Optional[str] = None,
upper_bound: Optional[str] = None,
description: Optional[str] = None,
run_setting_ui_hint: Optional["RunSettingUIParameterHint"] = None,
argument_name: Optional[str] = None,
section_name: Optional[str] = None,
section_description: Optional[str] = None,
section_argument_name: Optional[str] = None,
examples: Optional[List[str]] = None,
enum_values: Optional[List[str]] = None,
enum_values_to_argument_strings: Optional[Dict[str, str]] = None,
enabled_by_parameter_name: Optional[str] = None,
enabled_by_parameter_values: Optional[List[str]] = None,
disabled_by_parameters: Optional[List[str]] = None,
module_run_setting_type: Optional[Union[str, "ModuleRunSettingTypes"]] = None,
linked_parameter_default_value_mapping: Optional[Dict[str, str]] = None,
linked_parameter_key_name: Optional[str] = None,
support_link_setting: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Undefined", "Int", "Double", "Bool",
"String", "JsonString", "YamlString", "StringList".
:paramtype parameter_type: str or ~flow.models.RunSettingParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword description:
:paramtype description: str
:keyword run_setting_ui_hint:
:paramtype run_setting_ui_hint: ~flow.models.RunSettingUIParameterHint
:keyword argument_name:
:paramtype argument_name: str
:keyword section_name:
:paramtype section_name: str
:keyword section_description:
:paramtype section_description: str
:keyword section_argument_name:
:paramtype section_argument_name: str
:keyword examples:
:paramtype examples: list[str]
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword disabled_by_parameters:
:paramtype disabled_by_parameters: list[str]
:keyword module_run_setting_type: Possible values include: "All", "Released", "Default",
"Testing", "Legacy", "Preview", "UxFull", "Integration", "UxIntegration", "Full".
:paramtype module_run_setting_type: str or ~flow.models.ModuleRunSettingTypes
:keyword linked_parameter_default_value_mapping: Dictionary of :code:`<string>`.
:paramtype linked_parameter_default_value_mapping: dict[str, str]
:keyword linked_parameter_key_name:
:paramtype linked_parameter_key_name: str
:keyword support_link_setting:
:paramtype support_link_setting: bool
"""
super(RunSettingParameter, self).__init__(**kwargs)
self.name = name
self.label = label
self.parameter_type = parameter_type
self.is_optional = is_optional
self.default_value = default_value
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.description = description
self.run_setting_ui_hint = run_setting_ui_hint
self.argument_name = argument_name
self.section_name = section_name
self.section_description = section_description
self.section_argument_name = section_argument_name
self.examples = examples
self.enum_values = enum_values
self.enum_values_to_argument_strings = enum_values_to_argument_strings
self.enabled_by_parameter_name = enabled_by_parameter_name
self.enabled_by_parameter_values = enabled_by_parameter_values
self.disabled_by_parameters = disabled_by_parameters
self.module_run_setting_type = module_run_setting_type
self.linked_parameter_default_value_mapping = linked_parameter_default_value_mapping
self.linked_parameter_key_name = linked_parameter_key_name
self.support_link_setting = support_link_setting
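

# Illustrative sketch (values invented): a run-setting parameter definition is
# metadata only; note that default_value, lower_bound and upper_bound are
# carried as strings regardless of parameter_type, hence "10" rather than 10.
#
#     param = RunSettingParameter(name="example_timeout", parameter_type="Int",
#                                 is_optional=True, default_value="10",
#                                 lower_bound="1", upper_bound="60")
#     param.serialize()["parameterType"]    # 'Int'

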
class RunSettingParameterAssignment(msrest.serialization.Model):
"""RunSettingParameterAssignment.
:ivar use_graph_default_compute:
:vartype use_graph_default_compute: bool
:ivar mlc_compute_type:
:vartype mlc_compute_type: str
:ivar compute_run_settings:
:vartype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar linked_parameter_name:
:vartype linked_parameter_name: str
:ivar value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:vartype value_type: str or ~flow.models.ParameterValueType
:ivar assignments_to_concatenate:
:vartype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:ivar data_path_assignment:
:vartype data_path_assignment: ~flow.models.LegacyDataPath
:ivar data_set_definition_value_assignment:
:vartype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
"""
_attribute_map = {
'use_graph_default_compute': {'key': 'useGraphDefaultCompute', 'type': 'bool'},
'mlc_compute_type': {'key': 'mlcComputeType', 'type': 'str'},
'compute_run_settings': {'key': 'computeRunSettings', 'type': '[RunSettingParameterAssignment]'},
'linked_parameter_name': {'key': 'linkedParameterName', 'type': 'str'},
'value_type': {'key': 'valueType', 'type': 'str'},
'assignments_to_concatenate': {'key': 'assignmentsToConcatenate', 'type': '[ParameterAssignment]'},
'data_path_assignment': {'key': 'dataPathAssignment', 'type': 'LegacyDataPath'},
'data_set_definition_value_assignment': {'key': 'dataSetDefinitionValueAssignment', 'type': 'DataSetDefinitionValue'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
use_graph_default_compute: Optional[bool] = None,
mlc_compute_type: Optional[str] = None,
compute_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
linked_parameter_name: Optional[str] = None,
value_type: Optional[Union[str, "ParameterValueType"]] = None,
assignments_to_concatenate: Optional[List["ParameterAssignment"]] = None,
data_path_assignment: Optional["LegacyDataPath"] = None,
data_set_definition_value_assignment: Optional["DataSetDefinitionValue"] = None,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
"""
:keyword use_graph_default_compute:
:paramtype use_graph_default_compute: bool
:keyword mlc_compute_type:
:paramtype mlc_compute_type: str
:keyword compute_run_settings:
:paramtype compute_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword linked_parameter_name:
:paramtype linked_parameter_name: str
:keyword value_type: Possible values include: "Literal", "GraphParameterName", "Concatenate",
"Input", "DataPath", "DataSetDefinition".
:paramtype value_type: str or ~flow.models.ParameterValueType
:keyword assignments_to_concatenate:
:paramtype assignments_to_concatenate: list[~flow.models.ParameterAssignment]
:keyword data_path_assignment:
:paramtype data_path_assignment: ~flow.models.LegacyDataPath
:keyword data_set_definition_value_assignment:
:paramtype data_set_definition_value_assignment: ~flow.models.DataSetDefinitionValue
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
"""
super(RunSettingParameterAssignment, self).__init__(**kwargs)
self.use_graph_default_compute = use_graph_default_compute
self.mlc_compute_type = mlc_compute_type
self.compute_run_settings = compute_run_settings
self.linked_parameter_name = linked_parameter_name
self.value_type = value_type
self.assignments_to_concatenate = assignments_to_concatenate
self.data_path_assignment = data_path_assignment
self.data_set_definition_value_assignment = data_set_definition_value_assignment
self.name = name
self.value = value


class RunSettingUIParameterHint(msrest.serialization.Model):
"""RunSettingUIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
"Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
"DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
"JsonTextBox", "Connection", "Static".
:vartype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.UIJsonEditor
:ivar yaml_editor:
:vartype yaml_editor: ~flow.models.UIYamlEditor
:ivar compute_selection:
:vartype compute_selection: ~flow.models.UIComputeSelection
:ivar hyperparameter_configuration:
:vartype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
:ivar support_reset:
:vartype support_reset: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
'yaml_editor': {'key': 'yamlEditor', 'type': 'UIYamlEditor'},
'compute_selection': {'key': 'computeSelection', 'type': 'UIComputeSelection'},
'hyperparameter_configuration': {'key': 'hyperparameterConfiguration', 'type': 'UIHyperparameterConfiguration'},
'ux_ignore': {'key': 'uxIgnore', 'type': 'bool'},
'anonymous': {'key': 'anonymous', 'type': 'bool'},
'support_reset': {'key': 'supportReset', 'type': 'bool'},
}
def __init__(
self,
*,
ui_widget_type: Optional[Union[str, "RunSettingUIWidgetTypeEnum"]] = None,
json_editor: Optional["UIJsonEditor"] = None,
yaml_editor: Optional["UIYamlEditor"] = None,
compute_selection: Optional["UIComputeSelection"] = None,
hyperparameter_configuration: Optional["UIHyperparameterConfiguration"] = None,
ux_ignore: Optional[bool] = None,
anonymous: Optional[bool] = None,
support_reset: Optional[bool] = None,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "ComputeSelection", "JsonEditor",
"Mode", "SearchSpaceParameter", "SectionToggle", "YamlEditor", "EnableRuntimeSweep",
"DataStoreSelection", "Checkbox", "MultipleSelection", "HyperparameterConfiguration",
"JsonTextBox", "Connection", "Static".
:paramtype ui_widget_type: str or ~flow.models.RunSettingUIWidgetTypeEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.UIJsonEditor
:keyword yaml_editor:
:paramtype yaml_editor: ~flow.models.UIYamlEditor
:keyword compute_selection:
:paramtype compute_selection: ~flow.models.UIComputeSelection
:keyword hyperparameter_configuration:
:paramtype hyperparameter_configuration: ~flow.models.UIHyperparameterConfiguration
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
:keyword support_reset:
:paramtype support_reset: bool
"""
super(RunSettingUIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = ui_widget_type
self.json_editor = json_editor
self.yaml_editor = yaml_editor
self.compute_selection = compute_selection
self.hyperparameter_configuration = hyperparameter_configuration
self.ux_ignore = ux_ignore
self.anonymous = anonymous
self.support_reset = support_reset


class RunStatusPeriod(msrest.serialization.Model):
"""RunStatusPeriod.
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar sub_periods:
:vartype sub_periods: list[~flow.models.SubStatusPeriod]
:ivar start:
:vartype start: long
:ivar end:
:vartype end: long
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
'start': {'key': 'start', 'type': 'long'},
'end': {'key': 'end', 'type': 'long'},
}
def __init__(
self,
*,
status: Optional[Union[str, "RunStatus"]] = None,
sub_periods: Optional[List["SubStatusPeriod"]] = None,
start: Optional[int] = None,
end: Optional[int] = None,
**kwargs
):
"""
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword sub_periods:
:paramtype sub_periods: list[~flow.models.SubStatusPeriod]
:keyword start:
:paramtype start: long
:keyword end:
:paramtype end: long
"""
super(RunStatusPeriod, self).__init__(**kwargs)
self.status = status
self.sub_periods = sub_periods
self.start = start
self.end = end
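

# Illustrative sketch (values invented): start/end are 'long' values; this file
# does not say which epoch or unit they use, so the numbers below are
# placeholders only.
#
#     period = RunStatusPeriod(status="Running", start=1700000000000,
#                              end=1700000060000, sub_periods=[])
#     period.serialize()    # {'status': 'Running', 'subPeriods': [], ...}

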
class RuntimeConfiguration(msrest.serialization.Model):
"""RuntimeConfiguration.
:ivar base_image:
:vartype base_image: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'base_image': {'key': 'baseImage', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
base_image: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword base_image:
:paramtype base_image: str
:keyword version:
:paramtype version: str
"""
super(RuntimeConfiguration, self).__init__(**kwargs)
self.base_image = base_image
self.version = version
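

# Illustrative sketch (values invented): a simple two-field value object;
# construction and serialization follow the same msrest pattern as the other
# models in this module.
#
#     config = RuntimeConfiguration(base_image="example.azurecr.io/runtime",
#                                   version="1.0.0")
#     config.serialize()    # {'baseImage': 'example.azurecr.io/runtime', 'version': '1.0.0'}

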
class RunTypeV2(msrest.serialization.Model):
"""RunTypeV2.
:ivar orchestrator:
:vartype orchestrator: str
:ivar traits:
:vartype traits: list[str]
:ivar attribution:
:vartype attribution: str
:ivar compute_type:
:vartype compute_type: str
"""
_validation = {
'traits': {'unique': True},
}
_attribute_map = {
'orchestrator': {'key': 'orchestrator', 'type': 'str'},
'traits': {'key': 'traits', 'type': '[str]'},
'attribution': {'key': 'attribution', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
*,
orchestrator: Optional[str] = None,
traits: Optional[List[str]] = None,
attribution: Optional[str] = None,
compute_type: Optional[str] = None,
**kwargs
):
"""
:keyword orchestrator:
:paramtype orchestrator: str
:keyword traits:
:paramtype traits: list[str]
:keyword attribution:
:paramtype attribution: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(RunTypeV2, self).__init__(**kwargs)
self.orchestrator = orchestrator
self.traits = traits
self.attribution = attribution
self.compute_type = compute_type
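

# Illustrative sketch (values invented): _validation marks 'traits' as unique;
# msrest's Model.validate() is expected to report a constraint violation when
# the list contains duplicates.
#
#     bad = RunTypeV2(orchestrator="example", traits=["a", "a"])
#     errors = bad.validate()                        # expected: non-empty list
#     ok = RunTypeV2(traits=["a", "b"]).validate()   # expected: empty list

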
class RunTypeV2Index(msrest.serialization.Model):
"""RunTypeV2Index.
:ivar orchestrator:
:vartype orchestrator: str
:ivar traits: Dictionary of :code:`<string>`.
:vartype traits: dict[str, str]
:ivar attribution:
:vartype attribution: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'orchestrator': {'key': 'orchestrator', 'type': 'str'},
'traits': {'key': 'traits', 'type': '{str}'},
'attribution': {'key': 'attribution', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
*,
orchestrator: Optional[str] = None,
traits: Optional[Dict[str, str]] = None,
attribution: Optional[str] = None,
compute_type: Optional[str] = None,
**kwargs
):
"""
:keyword orchestrator:
:paramtype orchestrator: str
:keyword traits: Dictionary of :code:`<string>`.
:paramtype traits: dict[str, str]
:keyword attribution:
:paramtype attribution: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(RunTypeV2Index, self).__init__(**kwargs)
self.orchestrator = orchestrator
self.traits = traits
self.attribution = attribution
self.compute_type = compute_type


class SampleMeta(msrest.serialization.Model):
"""SampleMeta.
:ivar image:
:vartype image: str
:ivar id:
:vartype id: str
:ivar display_name:
:vartype display_name: str
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar doc_link:
:vartype doc_link: str
:ivar tags: A set of tags.
:vartype tags: list[str]
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar updated_at:
:vartype updated_at: ~datetime.datetime
:ivar feed_name:
:vartype feed_name: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'doc_link': {'key': 'docLink', 'type': 'str'},
'tags': {'key': 'tags', 'type': '[str]'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'updated_at': {'key': 'updatedAt', 'type': 'iso-8601'},
'feed_name': {'key': 'feedName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
image: Optional[str] = None,
id: Optional[str] = None,
display_name: Optional[str] = None,
name: Optional[str] = None,
description: Optional[str] = None,
doc_link: Optional[str] = None,
tags: Optional[List[str]] = None,
created_at: Optional[datetime.datetime] = None,
updated_at: Optional[datetime.datetime] = None,
feed_name: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword id:
:paramtype id: str
:keyword display_name:
:paramtype display_name: str
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword doc_link:
:paramtype doc_link: str
:keyword tags: A set of tags.
:paramtype tags: list[str]
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword updated_at:
:paramtype updated_at: ~datetime.datetime
:keyword feed_name:
:paramtype feed_name: str
:keyword version:
:paramtype version: str
"""
super(SampleMeta, self).__init__(**kwargs)
self.image = image
self.id = id
self.display_name = display_name
self.name = name
self.description = description
self.doc_link = doc_link
self.tags = tags
self.created_at = created_at
self.updated_at = updated_at
self.feed_name = feed_name
self.version = version


class SavedDataSetReference(msrest.serialization.Model):
"""SavedDataSetReference.
:ivar id:
:vartype id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
"""
super(SavedDataSetReference, self).__init__(**kwargs)
self.id = id


class SavePipelineDraftRequest(msrest.serialization.Model):
"""SavePipelineDraftRequest.
:ivar ui_widget_meta_infos:
:vartype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
:ivar web_service_inputs:
:vartype web_service_inputs: list[~flow.models.WebServicePort]
:ivar web_service_outputs:
:vartype web_service_outputs: list[~flow.models.WebServicePort]
:ivar nodes_in_draft:
:vartype nodes_in_draft: list[str]
:ivar name:
:vartype name: str
:ivar pipeline_type: Possible values include: "TrainingPipeline", "RealTimeInferencePipeline",
"BatchInferencePipeline", "Unknown".
:vartype pipeline_type: str or ~flow.models.PipelineType
:ivar pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:vartype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:ivar graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:vartype graph_components_mode: str or ~flow.models.GraphComponentsMode
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'ui_widget_meta_infos': {'key': 'uiWidgetMetaInfos', 'type': '[UIWidgetMetaInfo]'},
'web_service_inputs': {'key': 'webServiceInputs', 'type': '[WebServicePort]'},
'web_service_outputs': {'key': 'webServiceOutputs', 'type': '[WebServicePort]'},
'nodes_in_draft': {'key': 'nodesInDraft', 'type': '[str]'},
'name': {'key': 'name', 'type': 'str'},
'pipeline_type': {'key': 'pipelineType', 'type': 'str'},
'pipeline_draft_mode': {'key': 'pipelineDraftMode', 'type': 'str'},
'graph_components_mode': {'key': 'graphComponentsMode', 'type': 'str'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
*,
ui_widget_meta_infos: Optional[List["UIWidgetMetaInfo"]] = None,
web_service_inputs: Optional[List["WebServicePort"]] = None,
web_service_outputs: Optional[List["WebServicePort"]] = None,
nodes_in_draft: Optional[List[str]] = None,
name: Optional[str] = None,
pipeline_type: Optional[Union[str, "PipelineType"]] = None,
pipeline_draft_mode: Optional[Union[str, "PipelineDraftMode"]] = None,
graph_components_mode: Optional[Union[str, "GraphComponentsMode"]] = None,
sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
graph: Optional["GraphDraftEntity"] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
tags: Optional[Dict[str, str]] = None,
continue_run_on_step_failure: Optional[bool] = None,
description: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
enforce_rerun: Optional[bool] = None,
dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
**kwargs
):
"""
:keyword ui_widget_meta_infos:
:paramtype ui_widget_meta_infos: list[~flow.models.UIWidgetMetaInfo]
:keyword web_service_inputs:
:paramtype web_service_inputs: list[~flow.models.WebServicePort]
:keyword web_service_outputs:
:paramtype web_service_outputs: list[~flow.models.WebServicePort]
:keyword nodes_in_draft:
:paramtype nodes_in_draft: list[str]
:keyword name:
:paramtype name: str
:keyword pipeline_type: Possible values include: "TrainingPipeline",
"RealTimeInferencePipeline", "BatchInferencePipeline", "Unknown".
:paramtype pipeline_type: str or ~flow.models.PipelineType
:keyword pipeline_draft_mode: Possible values include: "None", "Normal", "Custom".
:paramtype pipeline_draft_mode: str or ~flow.models.PipelineDraftMode
:keyword graph_components_mode: Possible values include: "Normal", "AllDesignerBuildin",
"ContainsDesignerBuildin".
:paramtype graph_components_mode: str or ~flow.models.GraphComponentsMode
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(SavePipelineDraftRequest, self).__init__(**kwargs)
self.ui_widget_meta_infos = ui_widget_meta_infos
self.web_service_inputs = web_service_inputs
self.web_service_outputs = web_service_outputs
self.nodes_in_draft = nodes_in_draft
self.name = name
self.pipeline_type = pipeline_type
self.pipeline_draft_mode = pipeline_draft_mode
self.graph_components_mode = graph_components_mode
self.sub_pipelines_info = sub_pipelines_info
self.flattened_sub_graphs = flattened_sub_graphs
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
self.graph = graph
self.pipeline_run_settings = pipeline_run_settings
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.tags = tags
self.continue_run_on_step_failure = continue_run_on_step_failure
self.description = description
self.properties = properties
self.enforce_rerun = enforce_rerun
self.dataset_access_modes = dataset_access_modes
class ScheduleBase(msrest.serialization.Model):
"""ScheduleBase.
:ivar schedule_status: Possible values include: "Enabled", "Disabled".
:vartype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
:ivar schedule_type: Possible values include: "Cron", "Recurrence".
:vartype schedule_type: str or ~flow.models.ScheduleType
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar time_zone:
:vartype time_zone: str
:ivar expression: Cron expression; applies when schedule_type is "Cron".
:vartype expression: str
:ivar frequency: Recurrence frequency; applies when schedule_type is "Recurrence". Possible
 values include: "Minute", "Hour", "Day", "Week", "Month".
:vartype frequency: str or ~flow.models.RecurrenceFrequency
:ivar interval: Recurrence interval, in units of ``frequency``.
:vartype interval: int
:ivar pattern: Recurrence pattern; applies when schedule_type is "Recurrence".
:vartype pattern: ~flow.models.RecurrencePattern
"""
_attribute_map = {
'schedule_status': {'key': 'scheduleStatus', 'type': 'str'},
'schedule_type': {'key': 'scheduleType', 'type': 'str'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
'expression': {'key': 'expression', 'type': 'str'},
'frequency': {'key': 'frequency', 'type': 'str'},
'interval': {'key': 'interval', 'type': 'int'},
'pattern': {'key': 'pattern', 'type': 'RecurrencePattern'},
}
def __init__(
self,
*,
schedule_status: Optional[Union[str, "MfeInternalScheduleStatus"]] = None,
schedule_type: Optional[Union[str, "ScheduleType"]] = None,
end_time: Optional[datetime.datetime] = None,
start_time: Optional[datetime.datetime] = None,
time_zone: Optional[str] = None,
expression: Optional[str] = None,
frequency: Optional[Union[str, "RecurrenceFrequency"]] = None,
interval: Optional[int] = None,
pattern: Optional["RecurrencePattern"] = None,
**kwargs
):
"""
:keyword schedule_status: Possible values include: "Enabled", "Disabled".
:paramtype schedule_status: str or ~flow.models.MfeInternalScheduleStatus
:keyword schedule_type: Possible values include: "Cron", "Recurrence".
:paramtype schedule_type: str or ~flow.models.ScheduleType
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword time_zone:
:paramtype time_zone: str
:keyword expression: Cron expression; applies when schedule_type is "Cron".
:paramtype expression: str
:keyword frequency: Recurrence frequency; applies when schedule_type is "Recurrence".
 Possible values include: "Minute", "Hour", "Day", "Week", "Month".
:paramtype frequency: str or ~flow.models.RecurrenceFrequency
:keyword interval: Recurrence interval, in units of ``frequency``.
:paramtype interval: int
:keyword pattern: Recurrence pattern; applies when schedule_type is "Recurrence".
:paramtype pattern: ~flow.models.RecurrencePattern
"""
super(ScheduleBase, self).__init__(**kwargs)
self.schedule_status = schedule_status
self.schedule_type = schedule_type
self.end_time = end_time
self.start_time = start_time
self.time_zone = time_zone
self.expression = expression
self.frequency = frequency
self.interval = interval
self.pattern = pattern
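
# Usage sketch (illustrative only, not part of the generated contract): the same
# ScheduleBase model carries either a cron schedule (``expression``) or a
# recurrence schedule (``frequency``/``interval``/``pattern``), selected by
# ``schedule_type``. A hypothetical cron schedule firing weekdays at 09:00 UTC:
#
#     schedule = ScheduleBase(
#         schedule_status="Enabled",
#         schedule_type="Cron",
#         expression="0 9 * * 1-5",
#         time_zone="UTC",
#     )
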
class SchemaContractsCreatedBy(msrest.serialization.Model):
"""SchemaContractsCreatedBy.
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar user_name:
:vartype user_name: str
:ivar user_principal_name:
:vartype user_principal_name: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'user_principal_name': {'key': 'userPrincipalName', 'type': 'str'},
}
def __init__(
self,
*,
user_object_id: Optional[str] = None,
user_tenant_id: Optional[str] = None,
user_name: Optional[str] = None,
user_principal_name: Optional[str] = None,
**kwargs
):
"""
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword user_name:
:paramtype user_name: str
:keyword user_principal_name:
:paramtype user_principal_name: str
"""
super(SchemaContractsCreatedBy, self).__init__(**kwargs)
self.user_object_id = user_object_id
self.user_tenant_id = user_tenant_id
self.user_name = user_name
self.user_principal_name = user_principal_name
class ScopeCloudConfiguration(msrest.serialization.Model):
"""ScopeCloudConfiguration.
:ivar input_path_suffixes: This is a dictionary.
:vartype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:ivar output_path_suffixes: This is a dictionary.
:vartype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:ivar user_alias:
:vartype user_alias: str
:ivar tokens:
:vartype tokens: int
:ivar auto_token:
:vartype auto_token: int
:ivar vcp:
:vartype vcp: float
"""
_attribute_map = {
'input_path_suffixes': {'key': 'inputPathSuffixes', 'type': '{ArgumentAssignment}'},
'output_path_suffixes': {'key': 'outputPathSuffixes', 'type': '{ArgumentAssignment}'},
'user_alias': {'key': 'userAlias', 'type': 'str'},
'tokens': {'key': 'tokens', 'type': 'int'},
'auto_token': {'key': 'autoToken', 'type': 'int'},
'vcp': {'key': 'vcp', 'type': 'float'},
}
def __init__(
self,
*,
input_path_suffixes: Optional[Dict[str, "ArgumentAssignment"]] = None,
output_path_suffixes: Optional[Dict[str, "ArgumentAssignment"]] = None,
user_alias: Optional[str] = None,
tokens: Optional[int] = None,
auto_token: Optional[int] = None,
vcp: Optional[float] = None,
**kwargs
):
"""
:keyword input_path_suffixes: This is a dictionary.
:paramtype input_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:keyword output_path_suffixes: This is a dictionary.
:paramtype output_path_suffixes: dict[str, ~flow.models.ArgumentAssignment]
:keyword user_alias:
:paramtype user_alias: str
:keyword tokens:
:paramtype tokens: int
:keyword auto_token:
:paramtype auto_token: int
:keyword vcp:
:paramtype vcp: float
"""
super(ScopeCloudConfiguration, self).__init__(**kwargs)
self.input_path_suffixes = input_path_suffixes
self.output_path_suffixes = output_path_suffixes
self.user_alias = user_alias
self.tokens = tokens
self.auto_token = auto_token
self.vcp = vcp
class Seasonality(msrest.serialization.Model):
"""Seasonality.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.SeasonalityMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "SeasonalityMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.SeasonalityMode
:keyword value:
:paramtype value: int
"""
super(Seasonality, self).__init__(**kwargs)
self.mode = mode
self.value = value
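
# Usage sketch (illustrative only): ``mode="Auto"`` lets the service infer
# seasonality, while ``mode="Custom"`` takes an explicit period in ``value``,
# e.g. a hypothetical weekly seasonality on daily data:
#
#     seasonality = Seasonality(mode="Custom", value=7)
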
class SecretConfiguration(msrest.serialization.Model):
"""SecretConfiguration.
:ivar workspace_secret_name:
:vartype workspace_secret_name: str
:ivar uri:
:vartype uri: str
"""
_attribute_map = {
'workspace_secret_name': {'key': 'workspace_secret_name', 'type': 'str'},
'uri': {'key': 'uri', 'type': 'str'},
}
def __init__(
self,
*,
workspace_secret_name: Optional[str] = None,
uri: Optional[str] = None,
**kwargs
):
"""
:keyword workspace_secret_name:
:paramtype workspace_secret_name: str
:keyword uri:
:paramtype uri: str
"""
super(SecretConfiguration, self).__init__(**kwargs)
self.workspace_secret_name = workspace_secret_name
self.uri = uri
class SegmentedResult1(msrest.serialization.Model):
"""SegmentedResult1.
:ivar value:
:vartype value: list[~flow.models.FlowIndexEntity]
:ivar continuation_token:
:vartype continuation_token: str
:ivar count:
:vartype count: int
:ivar next_link:
:vartype next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[FlowIndexEntity]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["FlowIndexEntity"]] = None,
continuation_token: Optional[str] = None,
count: Optional[int] = None,
next_link: Optional[str] = None,
**kwargs
):
"""
:keyword value:
:paramtype value: list[~flow.models.FlowIndexEntity]
:keyword continuation_token:
:paramtype continuation_token: str
:keyword count:
:paramtype count: int
:keyword next_link:
:paramtype next_link: str
"""
super(SegmentedResult1, self).__init__(**kwargs)
self.value = value
self.continuation_token = continuation_token
self.count = count
self.next_link = next_link
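
# Usage sketch (illustrative only): SegmentedResult1 is a paged container, so a
# caller typically loops until ``continuation_token`` comes back empty. The
# ``fetch_page`` callable below is hypothetical and stands in for whichever
# client operation returns this model:
#
#     entities, token = [], None
#     while True:
#         page = fetch_page(continuation_token=token)  # -> SegmentedResult1
#         entities.extend(page.value or [])
#         token = page.continuation_token
#         if not token:
#             break
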
class ServiceLogRequest(msrest.serialization.Model):
"""ServiceLogRequest.
:ivar log_level: Possible values include: "Trace", "Debug", "Information", "Warning", "Error",
"Critical", "None".
:vartype log_level: str or ~flow.models.LogLevel
:ivar message:
:vartype message: str
:ivar timestamp:
:vartype timestamp: ~datetime.datetime
"""
_attribute_map = {
'log_level': {'key': 'logLevel', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
}
def __init__(
self,
*,
log_level: Optional[Union[str, "LogLevel"]] = None,
message: Optional[str] = None,
timestamp: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword log_level: Possible values include: "Trace", "Debug", "Information", "Warning",
"Error", "Critical", "None".
:paramtype log_level: str or ~flow.models.LogLevel
:keyword message:
:paramtype message: str
:keyword timestamp:
:paramtype timestamp: ~datetime.datetime
"""
super(ServiceLogRequest, self).__init__(**kwargs)
self.log_level = log_level
self.message = message
self.timestamp = timestamp
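
# Usage sketch (illustrative only): ``timestamp`` is serialized as ISO-8601 per
# the attribute map above, so a timezone-aware datetime is the safest input:
#
#     import datetime
#
#     entry = ServiceLogRequest(
#         log_level="Warning",
#         message="runtime responded slowly",
#         timestamp=datetime.datetime.now(datetime.timezone.utc),
#     )
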
class SessionApplication(msrest.serialization.Model):
"""SessionApplication.
:ivar image:
:vartype image: str
:ivar env_vars: Dictionary of :code:`<string>`.
:vartype env_vars: dict[str, str]
:ivar python_pip_requirements:
:vartype python_pip_requirements: list[str]
:ivar setup_results:
:vartype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
"""
_attribute_map = {
'image': {'key': 'image', 'type': 'str'},
'env_vars': {'key': 'envVars', 'type': '{str}'},
'python_pip_requirements': {'key': 'pythonPipRequirements', 'type': '[str]'},
'setup_results': {'key': 'setupResults', 'type': '[SessionApplicationRunCommandResult]'},
}
def __init__(
self,
*,
image: Optional[str] = None,
env_vars: Optional[Dict[str, str]] = None,
python_pip_requirements: Optional[List[str]] = None,
setup_results: Optional[List["SessionApplicationRunCommandResult"]] = None,
**kwargs
):
"""
:keyword image:
:paramtype image: str
:keyword env_vars: Dictionary of :code:`<string>`.
:paramtype env_vars: dict[str, str]
:keyword python_pip_requirements:
:paramtype python_pip_requirements: list[str]
:keyword setup_results:
:paramtype setup_results: list[~flow.models.SessionApplicationRunCommandResult]
"""
super(SessionApplication, self).__init__(**kwargs)
self.image = image
self.env_vars = env_vars
self.python_pip_requirements = python_pip_requirements
self.setup_results = setup_results
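
# Usage sketch (illustrative only): a hypothetical session application pinned to
# an image with extra pip requirements; ``setup_results`` is normally populated
# by the service rather than supplied by the caller:
#
#     app = SessionApplication(
#         image="mcr.example.com/runtime:latest",  # hypothetical image reference
#         env_vars={"LOG_LEVEL": "INFO"},
#         python_pip_requirements=["requests==2.31.0"],
#     )
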
class SessionApplicationRunCommandResult(msrest.serialization.Model):
"""SessionApplicationRunCommandResult.
:ivar command:
:vartype command: str
:ivar arguments:
:vartype arguments: list[str]
:ivar exit_code:
:vartype exit_code: int
:ivar std_out:
:vartype std_out: str
:ivar std_err:
:vartype std_err: str
"""
_attribute_map = {
'command': {'key': 'command', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': '[str]'},
'exit_code': {'key': 'exitCode', 'type': 'int'},
'std_out': {'key': 'stdOut', 'type': 'str'},
'std_err': {'key': 'stdErr', 'type': 'str'},
}
def __init__(
self,
*,
command: Optional[str] = None,
arguments: Optional[List[str]] = None,
exit_code: Optional[int] = None,
std_out: Optional[str] = None,
std_err: Optional[str] = None,
**kwargs
):
"""
:keyword command:
:paramtype command: str
:keyword arguments:
:paramtype arguments: list[str]
:keyword exit_code:
:paramtype exit_code: int
:keyword std_out:
:paramtype std_out: str
:keyword std_err:
:paramtype std_err: str
"""
super(SessionApplicationRunCommandResult, self).__init__(**kwargs)
self.command = command
self.arguments = arguments
self.exit_code = exit_code
self.std_out = std_out
self.std_err = std_err
class SessionProperties(msrest.serialization.Model):
"""SessionProperties.
:ivar session_id:
:vartype session_id: str
:ivar subscription_id:
:vartype subscription_id: str
:ivar resource_group_name:
:vartype resource_group_name: str
:ivar workspace_name:
:vartype workspace_name: str
:ivar user_object_id:
:vartype user_object_id: str
:ivar user_tenant_id:
:vartype user_tenant_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar application:
:vartype application: ~flow.models.SessionApplication
:ivar last_alive_time:
:vartype last_alive_time: ~datetime.datetime
"""
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
'workspace_name': {'key': 'workspaceName', 'type': 'str'},
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'application': {'key': 'application', 'type': 'SessionApplication'},
'last_alive_time': {'key': 'lastAliveTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
session_id: Optional[str] = None,
subscription_id: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
user_object_id: Optional[str] = None,
user_tenant_id: Optional[str] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
application: Optional["SessionApplication"] = None,
last_alive_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword session_id:
:paramtype session_id: str
:keyword subscription_id:
:paramtype subscription_id: str
:keyword resource_group_name:
:paramtype resource_group_name: str
:keyword workspace_name:
:paramtype workspace_name: str
:keyword user_object_id:
:paramtype user_object_id: str
:keyword user_tenant_id:
:paramtype user_tenant_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword application:
:paramtype application: ~flow.models.SessionApplication
:keyword last_alive_time:
:paramtype last_alive_time: ~datetime.datetime
"""
super(SessionProperties, self).__init__(**kwargs)
self.session_id = session_id
self.subscription_id = subscription_id
self.resource_group_name = resource_group_name
self.workspace_name = workspace_name
self.user_object_id = user_object_id
self.user_tenant_id = user_tenant_id
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.application = application
self.last_alive_time = last_alive_time
class SetupFlowSessionRequest(msrest.serialization.Model):
"""SetupFlowSessionRequest.
:ivar action: Possible values include: "Install", "Reset", "Update", "Delete".
:vartype action: str or ~flow.models.SetupFlowSessionAction
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'action': {'key': 'action', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
action: Optional[Union[str, "SetupFlowSessionAction"]] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword action: Possible values include: "Install", "Reset", "Update", "Delete".
:paramtype action: str or ~flow.models.SetupFlowSessionAction
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(SetupFlowSessionRequest, self).__init__(**kwargs)
self.action = action
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.identity = identity
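
# Usage sketch (illustrative only): a hypothetical request installing a session
# on a chosen VM size with a one-hour idle timeout:
#
#     request = SetupFlowSessionRequest(
#         action="Install",
#         vm_size="Standard_D2s_v3",  # hypothetical SKU
#         max_idle_time_seconds=3600,
#     )
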
class SharingScope(msrest.serialization.Model):
"""SharingScope.
:ivar type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
"Workspace".
:vartype type: str or ~flow.models.ScopeType
:ivar identifier:
:vartype identifier: str
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'identifier': {'key': 'identifier', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "ScopeType"]] = None,
identifier: Optional[str] = None,
**kwargs
):
"""
:keyword type: Possible values include: "Global", "Tenant", "Subscription", "ResourceGroup",
"Workspace".
:paramtype type: str or ~flow.models.ScopeType
:keyword identifier:
:paramtype identifier: str
"""
super(SharingScope, self).__init__(**kwargs)
self.type = type
self.identifier = identifier
class Snapshot(msrest.serialization.Model):
"""Snapshot.
:ivar id:
:vartype id: str
:ivar directory_name:
:vartype directory_name: str
:ivar snapshot_asset_id:
:vartype snapshot_asset_id: str
:ivar snapshot_entity_id:
:vartype snapshot_entity_id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'directory_name': {'key': 'directoryName', 'type': 'str'},
'snapshot_asset_id': {'key': 'snapshotAssetId', 'type': 'str'},
'snapshot_entity_id': {'key': 'snapshotEntityId', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
directory_name: Optional[str] = None,
snapshot_asset_id: Optional[str] = None,
snapshot_entity_id: Optional[str] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword directory_name:
:paramtype directory_name: str
:keyword snapshot_asset_id:
:paramtype snapshot_asset_id: str
:keyword snapshot_entity_id:
:paramtype snapshot_entity_id: str
"""
super(Snapshot, self).__init__(**kwargs)
self.id = id
self.directory_name = directory_name
self.snapshot_asset_id = snapshot_asset_id
self.snapshot_entity_id = snapshot_entity_id
class SnapshotInfo(msrest.serialization.Model):
"""SnapshotInfo.
:ivar root_download_url:
:vartype root_download_url: str
:ivar snapshots: This is a dictionary.
:vartype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
"""
_attribute_map = {
'root_download_url': {'key': 'rootDownloadUrl', 'type': 'str'},
'snapshots': {'key': 'snapshots', 'type': '{DownloadResourceInfo}'},
}
def __init__(
self,
*,
root_download_url: Optional[str] = None,
snapshots: Optional[Dict[str, "DownloadResourceInfo"]] = None,
**kwargs
):
"""
:keyword root_download_url:
:paramtype root_download_url: str
:keyword snapshots: This is a dictionary.
:paramtype snapshots: dict[str, ~flow.models.DownloadResourceInfo]
"""
super(SnapshotInfo, self).__init__(**kwargs)
self.root_download_url = root_download_url
self.snapshots = snapshots
class SourceCodeDataReference(msrest.serialization.Model):
"""SourceCodeDataReference.
:ivar data_store_name:
:vartype data_store_name: str
:ivar path:
:vartype path: str
"""
_attribute_map = {
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
*,
data_store_name: Optional[str] = None,
path: Optional[str] = None,
**kwargs
):
"""
:keyword data_store_name:
:paramtype data_store_name: str
:keyword path:
:paramtype path: str
"""
super(SourceCodeDataReference, self).__init__(**kwargs)
self.data_store_name = data_store_name
self.path = path
class SparkConfiguration(msrest.serialization.Model):
"""SparkConfiguration.
:ivar configuration: Dictionary of :code:`<string>`.
:vartype configuration: dict[str, str]
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar py_files:
:vartype py_files: list[str]
:ivar spark_pool_resource_id:
:vartype spark_pool_resource_id: str
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': '{str}'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'spark_pool_resource_id': {'key': 'sparkPoolResourceId', 'type': 'str'},
}
def __init__(
self,
*,
configuration: Optional[Dict[str, str]] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
jars: Optional[List[str]] = None,
py_files: Optional[List[str]] = None,
spark_pool_resource_id: Optional[str] = None,
**kwargs
):
"""
:keyword configuration: Dictionary of :code:`<string>`.
:paramtype configuration: dict[str, str]
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword py_files:
:paramtype py_files: list[str]
:keyword spark_pool_resource_id:
:paramtype spark_pool_resource_id: str
"""
super(SparkConfiguration, self).__init__(**kwargs)
self.configuration = configuration
self.files = files
self.archives = archives
self.jars = jars
self.py_files = py_files
self.spark_pool_resource_id = spark_pool_resource_id
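
# Usage sketch (illustrative only): Spark properties go into ``configuration``
# as plain string key/value pairs, with dependencies listed by URI. All values
# below are hypothetical placeholders:
#
#     spark_config = SparkConfiguration(
#         configuration={"spark.executor.cores": "2"},
#         py_files=["abfss://container@account/libs/helpers.zip"],
#         spark_pool_resource_id="/subscriptions/.../sparkPools/mypool",
#     )
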
class SparkJarTaskDto(msrest.serialization.Model):
"""SparkJarTaskDto.
:ivar main_class_name:
:vartype main_class_name: str
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'main_class_name': {'key': 'main_class_name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
*,
main_class_name: Optional[str] = None,
parameters: Optional[List[str]] = None,
**kwargs
):
"""
:keyword main_class_name:
:paramtype main_class_name: str
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkJarTaskDto, self).__init__(**kwargs)
self.main_class_name = main_class_name
self.parameters = parameters
class SparkJob(msrest.serialization.Model):
"""SparkJob.
:ivar job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:vartype job_type: str or ~flow.models.JobType
:ivar resources:
:vartype resources: ~flow.models.SparkResourceConfiguration
:ivar args:
:vartype args: str
:ivar code_id:
:vartype code_id: str
:ivar entry:
:vartype entry: ~flow.models.SparkJobEntry
:ivar py_files:
:vartype py_files: list[str]
:ivar jars:
:vartype jars: list[str]
:ivar files:
:vartype files: list[str]
:ivar archives:
:vartype archives: list[str]
:ivar environment_id:
:vartype environment_id: str
:ivar input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:vartype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:ivar output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:vartype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:ivar conf: Dictionary of :code:`<string>`.
:vartype conf: dict[str, str]
:ivar environment_variables: Dictionary of :code:`<string>`.
:vartype environment_variables: dict[str, str]
:ivar provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:vartype provisioning_state: str or ~flow.models.JobProvisioningState
:ivar parent_job_name:
:vartype parent_job_name: str
:ivar display_name:
:vartype display_name: str
:ivar experiment_name:
:vartype experiment_name: str
:ivar status: Possible values include: "NotStarted", "Starting", "Provisioning", "Preparing",
"Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled",
"NotResponding", "Paused", "Unknown", "Scheduled".
:vartype status: str or ~flow.models.JobStatus
:ivar interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:vartype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:ivar identity:
:vartype identity: ~flow.models.MfeInternalIdentityConfiguration
:ivar compute:
:vartype compute: ~flow.models.ComputeConfiguration
:ivar priority:
:vartype priority: int
:ivar output:
:vartype output: ~flow.models.JobOutputArtifacts
:ivar is_archived:
:vartype is_archived: bool
:ivar schedule:
:vartype schedule: ~flow.models.ScheduleBase
:ivar component_id:
:vartype component_id: str
:ivar notification_setting:
:vartype notification_setting: ~flow.models.NotificationSetting
:ivar secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:vartype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
"""
_attribute_map = {
'job_type': {'key': 'jobType', 'type': 'str'},
'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'},
'args': {'key': 'args', 'type': 'str'},
'code_id': {'key': 'codeId', 'type': 'str'},
'entry': {'key': 'entry', 'type': 'SparkJobEntry'},
'py_files': {'key': 'pyFiles', 'type': '[str]'},
'jars': {'key': 'jars', 'type': '[str]'},
'files': {'key': 'files', 'type': '[str]'},
'archives': {'key': 'archives', 'type': '[str]'},
'environment_id': {'key': 'environmentId', 'type': 'str'},
'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
'conf': {'key': 'conf', 'type': '{str}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'parent_job_name': {'key': 'parentJobName', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
'identity': {'key': 'identity', 'type': 'MfeInternalIdentityConfiguration'},
'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
'priority': {'key': 'priority', 'type': 'int'},
'output': {'key': 'output', 'type': 'JobOutputArtifacts'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'schedule': {'key': 'schedule', 'type': 'ScheduleBase'},
'component_id': {'key': 'componentId', 'type': 'str'},
'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'},
'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{MfeInternalSecretConfiguration}'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
job_type: Optional[Union[str, "JobType"]] = None,
resources: Optional["SparkResourceConfiguration"] = None,
args: Optional[str] = None,
code_id: Optional[str] = None,
entry: Optional["SparkJobEntry"] = None,
py_files: Optional[List[str]] = None,
jars: Optional[List[str]] = None,
files: Optional[List[str]] = None,
archives: Optional[List[str]] = None,
environment_id: Optional[str] = None,
input_data_bindings: Optional[Dict[str, "InputDataBinding"]] = None,
output_data_bindings: Optional[Dict[str, "OutputDataBinding"]] = None,
conf: Optional[Dict[str, str]] = None,
environment_variables: Optional[Dict[str, str]] = None,
provisioning_state: Optional[Union[str, "JobProvisioningState"]] = None,
parent_job_name: Optional[str] = None,
display_name: Optional[str] = None,
experiment_name: Optional[str] = None,
status: Optional[Union[str, "JobStatus"]] = None,
interaction_endpoints: Optional[Dict[str, "JobEndpoint"]] = None,
identity: Optional["MfeInternalIdentityConfiguration"] = None,
compute: Optional["ComputeConfiguration"] = None,
priority: Optional[int] = None,
output: Optional["JobOutputArtifacts"] = None,
is_archived: Optional[bool] = None,
schedule: Optional["ScheduleBase"] = None,
component_id: Optional[str] = None,
notification_setting: Optional["NotificationSetting"] = None,
secrets_configuration: Optional[Dict[str, "MfeInternalSecretConfiguration"]] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword job_type: Possible values include: "Command", "Sweep", "Labeling", "Pipeline", "Data",
"AutoML", "Spark", "Base".
:paramtype job_type: str or ~flow.models.JobType
:keyword resources:
:paramtype resources: ~flow.models.SparkResourceConfiguration
:keyword args:
:paramtype args: str
:keyword code_id:
:paramtype code_id: str
:keyword entry:
:paramtype entry: ~flow.models.SparkJobEntry
:keyword py_files:
:paramtype py_files: list[str]
:keyword jars:
:paramtype jars: list[str]
:keyword files:
:paramtype files: list[str]
:keyword archives:
:paramtype archives: list[str]
:keyword environment_id:
:paramtype environment_id: str
:keyword input_data_bindings: Dictionary of :code:`<InputDataBinding>`.
:paramtype input_data_bindings: dict[str, ~flow.models.InputDataBinding]
:keyword output_data_bindings: Dictionary of :code:`<OutputDataBinding>`.
:paramtype output_data_bindings: dict[str, ~flow.models.OutputDataBinding]
:keyword conf: Dictionary of :code:`<string>`.
:paramtype conf: dict[str, str]
:keyword environment_variables: Dictionary of :code:`<string>`.
:paramtype environment_variables: dict[str, str]
:keyword provisioning_state: Possible values include: "Succeeded", "Failed", "Canceled",
"InProgress".
:paramtype provisioning_state: str or ~flow.models.JobProvisioningState
:keyword parent_job_name:
:paramtype parent_job_name: str
:keyword display_name:
:paramtype display_name: str
:keyword experiment_name:
:paramtype experiment_name: str
:keyword status: Possible values include: "NotStarted", "Starting", "Provisioning",
"Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed",
"Canceled", "NotResponding", "Paused", "Unknown", "Scheduled".
:paramtype status: str or ~flow.models.JobStatus
:keyword interaction_endpoints: Dictionary of :code:`<JobEndpoint>`.
:paramtype interaction_endpoints: dict[str, ~flow.models.JobEndpoint]
:keyword identity:
:paramtype identity: ~flow.models.MfeInternalIdentityConfiguration
:keyword compute:
:paramtype compute: ~flow.models.ComputeConfiguration
:keyword priority:
:paramtype priority: int
:keyword output:
:paramtype output: ~flow.models.JobOutputArtifacts
:keyword is_archived:
:paramtype is_archived: bool
:keyword schedule:
:paramtype schedule: ~flow.models.ScheduleBase
:keyword component_id:
:paramtype component_id: str
:keyword notification_setting:
:paramtype notification_setting: ~flow.models.NotificationSetting
:keyword secrets_configuration: Dictionary of :code:`<MfeInternalSecretConfiguration>`.
:paramtype secrets_configuration: dict[str, ~flow.models.MfeInternalSecretConfiguration]
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
"""
super(SparkJob, self).__init__(**kwargs)
self.job_type = job_type
self.resources = resources
self.args = args
self.code_id = code_id
self.entry = entry
self.py_files = py_files
self.jars = jars
self.files = files
self.archives = archives
self.environment_id = environment_id
self.input_data_bindings = input_data_bindings
self.output_data_bindings = output_data_bindings
self.conf = conf
self.environment_variables = environment_variables
self.provisioning_state = provisioning_state
self.parent_job_name = parent_job_name
self.display_name = display_name
self.experiment_name = experiment_name
self.status = status
self.interaction_endpoints = interaction_endpoints
self.identity = identity
self.compute = compute
self.priority = priority
self.output = output
self.is_archived = is_archived
self.schedule = schedule
self.component_id = component_id
self.notification_setting = notification_setting
self.secrets_configuration = secrets_configuration
self.description = description
self.tags = tags
self.properties = properties
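
# Usage sketch (illustrative only): many SparkJob fields (status, provisioning
# state, interaction endpoints) are populated by the service, so a client would
# normally set only the submission-side fields. A minimal hypothetical job,
# using SparkJobEntry and SparkResourceConfiguration defined in this module:
#
#     job = SparkJob(
#         job_type="Spark",
#         display_name="wordcount",
#         entry=SparkJobEntry(file="main.py"),
#         resources=SparkResourceConfiguration(
#             instance_type="Standard_E4s_v3",  # hypothetical SKU
#             runtime_version="3.3",
#         ),
#         conf={"spark.executor.instances": "2"},
#     )
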
class SparkJobEntry(msrest.serialization.Model):
"""SparkJobEntry.
:ivar file:
:vartype file: str
:ivar class_name:
:vartype class_name: str
"""
_attribute_map = {
'file': {'key': 'file', 'type': 'str'},
'class_name': {'key': 'className', 'type': 'str'},
}
def __init__(
self,
*,
file: Optional[str] = None,
class_name: Optional[str] = None,
**kwargs
):
"""
:keyword file:
:paramtype file: str
:keyword class_name:
:paramtype class_name: str
"""
super(SparkJobEntry, self).__init__(**kwargs)
self.file = file
self.class_name = class_name
class SparkMavenPackage(msrest.serialization.Model):
"""SparkMavenPackage.
:ivar group:
:vartype group: str
:ivar artifact:
:vartype artifact: str
:ivar version:
:vartype version: str
"""
_attribute_map = {
'group': {'key': 'group', 'type': 'str'},
'artifact': {'key': 'artifact', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
}
def __init__(
self,
*,
group: Optional[str] = None,
artifact: Optional[str] = None,
version: Optional[str] = None,
**kwargs
):
"""
:keyword group:
:paramtype group: str
:keyword artifact:
:paramtype artifact: str
:keyword version:
:paramtype version: str
"""
super(SparkMavenPackage, self).__init__(**kwargs)
self.group = group
self.artifact = artifact
self.version = version
class SparkPythonTaskDto(msrest.serialization.Model):
"""SparkPythonTaskDto.
:ivar python_file:
:vartype python_file: str
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'python_file': {'key': 'python_file', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
*,
python_file: Optional[str] = None,
parameters: Optional[List[str]] = None,
**kwargs
):
"""
:keyword python_file:
:paramtype python_file: str
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkPythonTaskDto, self).__init__(**kwargs)
self.python_file = python_file
self.parameters = parameters
class SparkResourceConfiguration(msrest.serialization.Model):
"""SparkResourceConfiguration.
:ivar instance_type:
:vartype instance_type: str
:ivar runtime_version:
:vartype runtime_version: str
"""
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
}
def __init__(
self,
*,
instance_type: Optional[str] = None,
runtime_version: Optional[str] = None,
**kwargs
):
"""
:keyword instance_type:
:paramtype instance_type: str
:keyword runtime_version:
:paramtype runtime_version: str
"""
super(SparkResourceConfiguration, self).__init__(**kwargs)
self.instance_type = instance_type
self.runtime_version = runtime_version
class SparkSection(msrest.serialization.Model):
"""SparkSection.
:ivar repositories:
:vartype repositories: list[str]
:ivar packages:
:vartype packages: list[~flow.models.SparkMavenPackage]
:ivar precache_packages:
:vartype precache_packages: bool
"""
_attribute_map = {
'repositories': {'key': 'repositories', 'type': '[str]'},
'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
}
def __init__(
self,
*,
repositories: Optional[List[str]] = None,
packages: Optional[List["SparkMavenPackage"]] = None,
precache_packages: Optional[bool] = None,
**kwargs
):
"""
:keyword repositories:
:paramtype repositories: list[str]
:keyword packages:
:paramtype packages: list[~flow.models.SparkMavenPackage]
:keyword precache_packages:
:paramtype precache_packages: bool
"""
super(SparkSection, self).__init__(**kwargs)
self.repositories = repositories
self.packages = packages
self.precache_packages = precache_packages
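
# Usage sketch (illustrative only): Maven dependencies pair a SparkSection with
# SparkMavenPackage entries; the coordinates below are hypothetical:
#
#     spark_section = SparkSection(
#         repositories=["https://repo1.maven.org/maven2"],
#         packages=[SparkMavenPackage(group="org.example", artifact="demo", version="1.0.0")],
#         precache_packages=True,
#     )
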
class SparkSubmitTaskDto(msrest.serialization.Model):
"""SparkSubmitTaskDto.
:ivar parameters:
:vartype parameters: list[str]
"""
_attribute_map = {
'parameters': {'key': 'parameters', 'type': '[str]'},
}
def __init__(
self,
*,
parameters: Optional[List[str]] = None,
**kwargs
):
"""
:keyword parameters:
:paramtype parameters: list[str]
"""
super(SparkSubmitTaskDto, self).__init__(**kwargs)
self.parameters = parameters
class SqlDataPath(msrest.serialization.Model):
"""SqlDataPath.
:ivar sql_table_name:
:vartype sql_table_name: str
:ivar sql_query:
:vartype sql_query: str
:ivar sql_stored_procedure_name:
:vartype sql_stored_procedure_name: str
:ivar sql_stored_procedure_params:
:vartype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
"""
_attribute_map = {
'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
'sql_query': {'key': 'sqlQuery', 'type': 'str'},
'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
}
def __init__(
self,
*,
sql_table_name: Optional[str] = None,
sql_query: Optional[str] = None,
sql_stored_procedure_name: Optional[str] = None,
sql_stored_procedure_params: Optional[List["StoredProcedureParameter"]] = None,
**kwargs
):
"""
:keyword sql_table_name:
:paramtype sql_table_name: str
:keyword sql_query:
:paramtype sql_query: str
:keyword sql_stored_procedure_name:
:paramtype sql_stored_procedure_name: str
:keyword sql_stored_procedure_params:
:paramtype sql_stored_procedure_params: list[~flow.models.StoredProcedureParameter]
"""
super(SqlDataPath, self).__init__(**kwargs)
self.sql_table_name = sql_table_name
self.sql_query = sql_query
self.sql_stored_procedure_name = sql_stored_procedure_name
self.sql_stored_procedure_params = sql_stored_procedure_params
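
# Usage sketch (illustrative only): typically only one of table, query, or
# stored procedure is set; parameters use StoredProcedureParameter (defined
# later in this module). Names and values below are hypothetical:
#
#     data_path = SqlDataPath(
#         sql_stored_procedure_name="usp_get_rows",
#         sql_stored_procedure_params=[
#             StoredProcedureParameter(name="MaxRows", value="100", type="Int"),
#         ],
#     )
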
class StackEnsembleSettings(msrest.serialization.Model):
"""StackEnsembleSettings.
:ivar stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:vartype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
:ivar stack_meta_learner_train_percentage:
:vartype stack_meta_learner_train_percentage: float
:ivar stack_meta_learner_k_wargs: Anything.
:vartype stack_meta_learner_k_wargs: any
"""
_attribute_map = {
'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'},
'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'},
'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'},
}
def __init__(
self,
*,
stack_meta_learner_type: Optional[Union[str, "StackMetaLearnerType"]] = None,
stack_meta_learner_train_percentage: Optional[float] = None,
stack_meta_learner_k_wargs: Optional[Any] = None,
**kwargs
):
"""
:keyword stack_meta_learner_type: Possible values include: "None", "LogisticRegression",
"LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV",
"LightGBMRegressor", "LinearRegression".
:paramtype stack_meta_learner_type: str or ~flow.models.StackMetaLearnerType
:keyword stack_meta_learner_train_percentage:
:paramtype stack_meta_learner_train_percentage: float
:keyword stack_meta_learner_k_wargs: Anything.
:paramtype stack_meta_learner_k_wargs: any
"""
super(StackEnsembleSettings, self).__init__(**kwargs)
self.stack_meta_learner_type = stack_meta_learner_type
self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage
self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs
class StandbyPoolProperties(msrest.serialization.Model):
"""StandbyPoolProperties.
:ivar name:
:vartype name: str
:ivar count:
:vartype count: int
:ivar vm_size:
:vartype vm_size: str
:ivar standby_available_instances:
:vartype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'standby_available_instances': {'key': 'standbyAvailableInstances', 'type': '[StandbyPoolResourceStatus]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
count: Optional[int] = None,
vm_size: Optional[str] = None,
standby_available_instances: Optional[List["StandbyPoolResourceStatus"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword count:
:paramtype count: int
:keyword vm_size:
:paramtype vm_size: str
:keyword standby_available_instances:
:paramtype standby_available_instances: list[~flow.models.StandbyPoolResourceStatus]
"""
super(StandbyPoolProperties, self).__init__(**kwargs)
self.name = name
self.count = count
self.vm_size = vm_size
self.standby_available_instances = standby_available_instances
class StandbyPoolResourceStatus(msrest.serialization.Model):
"""StandbyPoolResourceStatus.
:ivar status:
:vartype status: str
:ivar error:
:vartype error: ~flow.models.CloudError
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'CloudError'},
}
def __init__(
self,
*,
status: Optional[str] = None,
error: Optional["CloudError"] = None,
**kwargs
):
"""
:keyword status:
:paramtype status: str
:keyword error:
:paramtype error: ~flow.models.CloudError
"""
super(StandbyPoolResourceStatus, self).__init__(**kwargs)
self.status = status
self.error = error
class StartRunResult(msrest.serialization.Model):
"""StartRunResult.
All required parameters must be populated in order to send to Azure.
:ivar run_id: Required.
:vartype run_id: str
"""
_validation = {
'run_id': {'required': True, 'min_length': 1},
}
_attribute_map = {
'run_id': {'key': 'runId', 'type': 'str'},
}
def __init__(
self,
*,
run_id: str,
**kwargs
):
"""
:keyword run_id: Required.
:paramtype run_id: str
"""
super(StartRunResult, self).__init__(**kwargs)
self.run_id = run_id
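
# Usage sketch (illustrative only): ``run_id`` is required with a minimum
# length of 1 per the validation map above, so client-side msrest validation
# rejects an empty value when the model is validated or serialized;
# construction itself does not validate:
#
#     result = StartRunResult(run_id="3f2c9a0e")  # hypothetical run id
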
class StepRunProfile(msrest.serialization.Model):
"""StepRunProfile.
:ivar step_run_id:
:vartype step_run_id: str
:ivar step_run_number:
:vartype step_run_number: int
:ivar run_url:
:vartype run_url: str
:ivar compute_target:
:vartype compute_target: str
:ivar compute_target_url:
:vartype compute_target_url: str
:ivar node_id:
:vartype node_id: str
:ivar node_name:
:vartype node_name: str
:ivar step_name:
:vartype step_name: str
:ivar create_time:
:vartype create_time: long
:ivar start_time:
:vartype start_time: long
:ivar end_time:
:vartype end_time: long
:ivar status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:vartype status: str or ~flow.models.RunStatus
:ivar status_detail:
:vartype status_detail: str
:ivar is_reused:
:vartype is_reused: bool
:ivar reused_pipeline_run_id:
:vartype reused_pipeline_run_id: str
:ivar reused_step_run_id:
:vartype reused_step_run_id: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar status_timeline:
:vartype status_timeline: list[~flow.models.RunStatusPeriod]
"""
_attribute_map = {
'step_run_id': {'key': 'stepRunId', 'type': 'str'},
'step_run_number': {'key': 'stepRunNumber', 'type': 'int'},
'run_url': {'key': 'runUrl', 'type': 'str'},
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'compute_target_url': {'key': 'computeTargetUrl', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'node_name': {'key': 'nodeName', 'type': 'str'},
'step_name': {'key': 'stepName', 'type': 'str'},
'create_time': {'key': 'createTime', 'type': 'long'},
'start_time': {'key': 'startTime', 'type': 'long'},
'end_time': {'key': 'endTime', 'type': 'long'},
'status': {'key': 'status', 'type': 'str'},
'status_detail': {'key': 'statusDetail', 'type': 'str'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
'reused_pipeline_run_id': {'key': 'reusedPipelineRunId', 'type': 'str'},
'reused_step_run_id': {'key': 'reusedStepRunId', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'status_timeline': {'key': 'statusTimeline', 'type': '[RunStatusPeriod]'},
}
def __init__(
self,
*,
step_run_id: Optional[str] = None,
step_run_number: Optional[int] = None,
run_url: Optional[str] = None,
compute_target: Optional[str] = None,
compute_target_url: Optional[str] = None,
node_id: Optional[str] = None,
node_name: Optional[str] = None,
step_name: Optional[str] = None,
create_time: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
status: Optional[Union[str, "RunStatus"]] = None,
status_detail: Optional[str] = None,
is_reused: Optional[bool] = None,
reused_pipeline_run_id: Optional[str] = None,
reused_step_run_id: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
status_timeline: Optional[List["RunStatusPeriod"]] = None,
**kwargs
):
"""
:keyword step_run_id:
:paramtype step_run_id: str
:keyword step_run_number:
:paramtype step_run_number: int
:keyword run_url:
:paramtype run_url: str
:keyword compute_target:
:paramtype compute_target: str
:keyword compute_target_url:
:paramtype compute_target_url: str
:keyword node_id:
:paramtype node_id: str
:keyword node_name:
:paramtype node_name: str
:keyword step_name:
:paramtype step_name: str
:keyword create_time:
:paramtype create_time: long
:keyword start_time:
:paramtype start_time: long
:keyword end_time:
:paramtype end_time: long
:keyword status: Possible values include: "NotStarted", "Unapproved", "Pausing", "Paused",
"Starting", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed",
"Failed", "Canceled".
:paramtype status: str or ~flow.models.RunStatus
:keyword status_detail:
:paramtype status_detail: str
:keyword is_reused:
:paramtype is_reused: bool
:keyword reused_pipeline_run_id:
:paramtype reused_pipeline_run_id: str
:keyword reused_step_run_id:
:paramtype reused_step_run_id: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword status_timeline:
:paramtype status_timeline: list[~flow.models.RunStatusPeriod]
"""
super(StepRunProfile, self).__init__(**kwargs)
self.step_run_id = step_run_id
self.step_run_number = step_run_number
self.run_url = run_url
self.compute_target = compute_target
self.compute_target_url = compute_target_url
self.node_id = node_id
self.node_name = node_name
self.step_name = step_name
self.create_time = create_time
self.start_time = start_time
self.end_time = end_time
self.status = status
self.status_detail = status_detail
self.is_reused = is_reused
self.reused_pipeline_run_id = reused_pipeline_run_id
self.reused_step_run_id = reused_step_run_id
self.tags = tags
self.status_timeline = status_timeline
class StorageInfo(msrest.serialization.Model):
"""StorageInfo.
:ivar storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
:vartype storage_auth_type: str or ~flow.models.StorageAuthType
:ivar connection_string:
:vartype connection_string: str
:ivar sas_token:
:vartype sas_token: str
:ivar account_name:
:vartype account_name: str
"""
_attribute_map = {
'storage_auth_type': {'key': 'storageAuthType', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'sas_token': {'key': 'sasToken', 'type': 'str'},
'account_name': {'key': 'accountName', 'type': 'str'},
}
def __init__(
self,
*,
storage_auth_type: Optional[Union[str, "StorageAuthType"]] = None,
connection_string: Optional[str] = None,
sas_token: Optional[str] = None,
account_name: Optional[str] = None,
**kwargs
):
"""
:keyword storage_auth_type: Possible values include: "MSI", "ConnectionString", "SAS".
:paramtype storage_auth_type: str or ~flow.models.StorageAuthType
:keyword connection_string:
:paramtype connection_string: str
:keyword sas_token:
:paramtype sas_token: str
:keyword account_name:
:paramtype account_name: str
"""
super(StorageInfo, self).__init__(**kwargs)
self.storage_auth_type = storage_auth_type
self.connection_string = connection_string
self.sas_token = sas_token
self.account_name = account_name
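
# Usage sketch (illustrative only): only the credential matching
# ``storage_auth_type`` is expected to be populated, e.g. a hypothetical
# SAS-based configuration:
#
#     storage = StorageInfo(
#         storage_auth_type="SAS",
#         account_name="mystorageaccount",  # hypothetical account
#         sas_token="<sas-token>",
#     )
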
class StoredProcedureParameter(msrest.serialization.Model):
"""StoredProcedureParameter.
:ivar name:
:vartype name: str
:ivar value:
:vartype value: str
:ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:vartype type: str or ~flow.models.StoredProcedureParameterType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
type: Optional[Union[str, "StoredProcedureParameterType"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword value:
:paramtype value: str
:keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
:paramtype type: str or ~flow.models.StoredProcedureParameterType
"""
super(StoredProcedureParameter, self).__init__(**kwargs)
self.name = name
self.value = value
self.type = type
class Stream(msrest.serialization.Model):
"""Stream.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar can_read:
:vartype can_read: bool
:ivar can_write:
:vartype can_write: bool
:ivar can_seek:
:vartype can_seek: bool
:ivar can_timeout:
:vartype can_timeout: bool
:ivar length:
:vartype length: long
:ivar position:
:vartype position: long
:ivar read_timeout:
:vartype read_timeout: int
:ivar write_timeout:
:vartype write_timeout: int
"""
_validation = {
'can_read': {'readonly': True},
'can_write': {'readonly': True},
'can_seek': {'readonly': True},
'can_timeout': {'readonly': True},
'length': {'readonly': True},
}
_attribute_map = {
'can_read': {'key': 'canRead', 'type': 'bool'},
'can_write': {'key': 'canWrite', 'type': 'bool'},
'can_seek': {'key': 'canSeek', 'type': 'bool'},
'can_timeout': {'key': 'canTimeout', 'type': 'bool'},
'length': {'key': 'length', 'type': 'long'},
'position': {'key': 'position', 'type': 'long'},
'read_timeout': {'key': 'readTimeout', 'type': 'int'},
'write_timeout': {'key': 'writeTimeout', 'type': 'int'},
}
def __init__(
self,
*,
position: Optional[int] = None,
read_timeout: Optional[int] = None,
write_timeout: Optional[int] = None,
**kwargs
):
"""
:keyword position:
:paramtype position: long
:keyword read_timeout:
:paramtype read_timeout: int
:keyword write_timeout:
:paramtype write_timeout: int
"""
super(Stream, self).__init__(**kwargs)
self.can_read = None
self.can_write = None
self.can_seek = None
self.can_timeout = None
self.length = None
self.position = position
self.read_timeout = read_timeout
self.write_timeout = write_timeout
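
# Usage sketch (illustrative only): the capability flags and ``length`` are
# marked readonly, so they stay ``None`` on locally constructed instances and
# are only filled in when the model is deserialized from a service response:
#
#     stream = Stream(position=0, read_timeout=30000)
#     assert stream.can_read is None  # populated by the server, not the client
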
class StructuredInterface(msrest.serialization.Model):
"""StructuredInterface.
:ivar command_line_pattern:
:vartype command_line_pattern: str
:ivar inputs:
:vartype inputs: list[~flow.models.StructuredInterfaceInput]
:ivar outputs:
:vartype outputs: list[~flow.models.StructuredInterfaceOutput]
:ivar control_outputs:
:vartype control_outputs: list[~flow.models.ControlOutput]
:ivar parameters:
:vartype parameters: list[~flow.models.StructuredInterfaceParameter]
:ivar metadata_parameters:
:vartype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
:ivar arguments:
:vartype arguments: list[~flow.models.ArgumentAssignment]
"""
_attribute_map = {
'command_line_pattern': {'key': 'commandLinePattern', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '[StructuredInterfaceInput]'},
'outputs': {'key': 'outputs', 'type': '[StructuredInterfaceOutput]'},
'control_outputs': {'key': 'controlOutputs', 'type': '[ControlOutput]'},
'parameters': {'key': 'parameters', 'type': '[StructuredInterfaceParameter]'},
'metadata_parameters': {'key': 'metadataParameters', 'type': '[StructuredInterfaceParameter]'},
'arguments': {'key': 'arguments', 'type': '[ArgumentAssignment]'},
}
def __init__(
self,
*,
command_line_pattern: Optional[str] = None,
inputs: Optional[List["StructuredInterfaceInput"]] = None,
outputs: Optional[List["StructuredInterfaceOutput"]] = None,
control_outputs: Optional[List["ControlOutput"]] = None,
parameters: Optional[List["StructuredInterfaceParameter"]] = None,
metadata_parameters: Optional[List["StructuredInterfaceParameter"]] = None,
arguments: Optional[List["ArgumentAssignment"]] = None,
**kwargs
):
"""
:keyword command_line_pattern:
:paramtype command_line_pattern: str
:keyword inputs:
:paramtype inputs: list[~flow.models.StructuredInterfaceInput]
:keyword outputs:
:paramtype outputs: list[~flow.models.StructuredInterfaceOutput]
:keyword control_outputs:
:paramtype control_outputs: list[~flow.models.ControlOutput]
:keyword parameters:
:paramtype parameters: list[~flow.models.StructuredInterfaceParameter]
:keyword metadata_parameters:
:paramtype metadata_parameters: list[~flow.models.StructuredInterfaceParameter]
:keyword arguments:
:paramtype arguments: list[~flow.models.ArgumentAssignment]
"""
super(StructuredInterface, self).__init__(**kwargs)
self.command_line_pattern = command_line_pattern
self.inputs = inputs
self.outputs = outputs
self.control_outputs = control_outputs
self.parameters = parameters
self.metadata_parameters = metadata_parameters
self.arguments = arguments
class StructuredInterfaceInput(msrest.serialization.Model):
"""StructuredInterfaceInput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_ids_list:
:vartype data_type_ids_list: list[str]
:ivar is_optional:
:vartype is_optional: bool
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_resource:
:vartype is_resource: bool
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar dataset_types:
:vartype dataset_types: list[str or ~flow.models.DatasetType]
"""
_validation = {
'dataset_types': {'unique': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_ids_list': {'key': 'dataTypeIdsList', 'type': '[str]'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_resource': {'key': 'isResource', 'type': 'bool'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'dataset_types': {'key': 'datasetTypes', 'type': '[str]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
data_type_ids_list: Optional[List[str]] = None,
is_optional: Optional[bool] = None,
description: Optional[str] = None,
skip_processing: Optional[bool] = None,
is_resource: Optional[bool] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
data_reference_name: Optional[str] = None,
dataset_types: Optional[List[Union[str, "DatasetType"]]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_ids_list:
:paramtype data_type_ids_list: list[str]
:keyword is_optional:
:paramtype is_optional: bool
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_resource:
:paramtype is_resource: bool
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword dataset_types:
:paramtype dataset_types: list[str or ~flow.models.DatasetType]
"""
super(StructuredInterfaceInput, self).__init__(**kwargs)
self.name = name
self.label = label
self.data_type_ids_list = data_type_ids_list
self.is_optional = is_optional
self.description = description
self.skip_processing = skip_processing
self.is_resource = is_resource
self.data_store_mode = data_store_mode
self.path_on_compute = path_on_compute
self.overwrite = overwrite
self.data_reference_name = data_reference_name
self.dataset_types = dataset_types
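
# Usage sketch (illustrative only): a hypothetical optional input that mounts at
# runtime; note that ``dataset_types`` must not contain duplicates per the
# validation map above (the value shown is a hypothetical DatasetType member):
#
#     input_def = StructuredInterfaceInput(
#         name="training_data",
#         is_optional=True,
#         data_store_mode="Mount",
#         dataset_types=["Tabular"],
#     )
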
class StructuredInterfaceOutput(msrest.serialization.Model):
"""StructuredInterfaceOutput.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar data_type_id:
:vartype data_type_id: str
:ivar pass_through_data_type_input_name:
:vartype pass_through_data_type_input_name: str
:ivar description:
:vartype description: str
:ivar skip_processing:
:vartype skip_processing: bool
:ivar is_artifact:
:vartype is_artifact: bool
:ivar data_store_name:
:vartype data_store_name: str
:ivar data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:vartype data_store_mode: str or ~flow.models.AEVADataStoreMode
:ivar path_on_compute:
:vartype path_on_compute: str
:ivar overwrite:
:vartype overwrite: bool
:ivar data_reference_name:
:vartype data_reference_name: str
:ivar training_output:
:vartype training_output: ~flow.models.TrainingOutput
:ivar dataset_output:
:vartype dataset_output: ~flow.models.DatasetOutput
:ivar asset_output_settings:
:vartype asset_output_settings: ~flow.models.AssetOutputSettings
:ivar early_available:
:vartype early_available: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'data_type_id': {'key': 'dataTypeId', 'type': 'str'},
'pass_through_data_type_input_name': {'key': 'passThroughDataTypeInputName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'skip_processing': {'key': 'skipProcessing', 'type': 'bool'},
'is_artifact': {'key': 'IsArtifact', 'type': 'bool'},
'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
'data_store_mode': {'key': 'dataStoreMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'data_reference_name': {'key': 'dataReferenceName', 'type': 'str'},
'training_output': {'key': 'trainingOutput', 'type': 'TrainingOutput'},
'dataset_output': {'key': 'datasetOutput', 'type': 'DatasetOutput'},
'asset_output_settings': {'key': 'AssetOutputSettings', 'type': 'AssetOutputSettings'},
'early_available': {'key': 'EarlyAvailable', 'type': 'bool'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
data_type_id: Optional[str] = None,
pass_through_data_type_input_name: Optional[str] = None,
description: Optional[str] = None,
skip_processing: Optional[bool] = None,
is_artifact: Optional[bool] = None,
data_store_name: Optional[str] = None,
data_store_mode: Optional[Union[str, "AEVADataStoreMode"]] = None,
path_on_compute: Optional[str] = None,
overwrite: Optional[bool] = None,
data_reference_name: Optional[str] = None,
training_output: Optional["TrainingOutput"] = None,
dataset_output: Optional["DatasetOutput"] = None,
asset_output_settings: Optional["AssetOutputSettings"] = None,
early_available: Optional[bool] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword data_type_id:
:paramtype data_type_id: str
:keyword pass_through_data_type_input_name:
:paramtype pass_through_data_type_input_name: str
:keyword description:
:paramtype description: str
:keyword skip_processing:
:paramtype skip_processing: bool
:keyword is_artifact:
:paramtype is_artifact: bool
:keyword data_store_name:
:paramtype data_store_name: str
:keyword data_store_mode: Possible values include: "None", "Mount", "Download", "Upload",
"Direct", "Hdfs", "Link".
:paramtype data_store_mode: str or ~flow.models.AEVADataStoreMode
:keyword path_on_compute:
:paramtype path_on_compute: str
:keyword overwrite:
:paramtype overwrite: bool
:keyword data_reference_name:
:paramtype data_reference_name: str
:keyword training_output:
:paramtype training_output: ~flow.models.TrainingOutput
:keyword dataset_output:
:paramtype dataset_output: ~flow.models.DatasetOutput
:keyword asset_output_settings:
:paramtype asset_output_settings: ~flow.models.AssetOutputSettings
:keyword early_available:
:paramtype early_available: bool
"""
super(StructuredInterfaceOutput, self).__init__(**kwargs)
self.name = name
self.label = label
self.data_type_id = data_type_id
self.pass_through_data_type_input_name = pass_through_data_type_input_name
self.description = description
self.skip_processing = skip_processing
self.is_artifact = is_artifact
self.data_store_name = data_store_name
self.data_store_mode = data_store_mode
self.path_on_compute = path_on_compute
self.overwrite = overwrite
self.data_reference_name = data_reference_name
self.training_output = training_output
self.dataset_output = dataset_output
self.asset_output_settings = asset_output_settings
self.early_available = early_available
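

# Illustrative sketch with hypothetical values: an output pinned to a datastore
# with early availability enabled. serialize() renames snake_case attributes to
# the REST keys declared in _attribute_map (note the service's Pascal-case keys
# such as 'EarlyAvailable' and 'IsArtifact').
def _example_structured_interface_output():
    example_output = StructuredInterfaceOutput(
        name="scored_data",                # hypothetical port name
        data_type_id="AnyFile",            # hypothetical data type id
        data_store_name="workspaceblobstore",
        data_store_mode="Upload",
        early_available=True,
    )
    return example_output.serialize()      # e.g. {'name': 'scored_data', ..., 'EarlyAvailable': True}
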
class StructuredInterfaceParameter(msrest.serialization.Model):
"""StructuredInterfaceParameter.
:ivar name:
:vartype name: str
:ivar label:
:vartype label: str
:ivar parameter_type: Possible values include: "Int", "Double", "Bool", "String", "Undefined".
:vartype parameter_type: str or ~flow.models.ParameterType
:ivar is_optional:
:vartype is_optional: bool
:ivar default_value:
:vartype default_value: str
:ivar lower_bound:
:vartype lower_bound: str
:ivar upper_bound:
:vartype upper_bound: str
:ivar enum_values:
:vartype enum_values: list[str]
:ivar enum_values_to_argument_strings: This is a dictionary.
:vartype enum_values_to_argument_strings: dict[str, str]
:ivar description:
:vartype description: str
:ivar set_environment_variable:
:vartype set_environment_variable: bool
:ivar environment_variable_override:
:vartype environment_variable_override: str
:ivar enabled_by_parameter_name:
:vartype enabled_by_parameter_name: str
:ivar enabled_by_parameter_values:
:vartype enabled_by_parameter_values: list[str]
:ivar ui_hint:
:vartype ui_hint: ~flow.models.UIParameterHint
:ivar group_names:
:vartype group_names: list[str]
:ivar argument_name:
:vartype argument_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'parameter_type': {'key': 'parameterType', 'type': 'str'},
'is_optional': {'key': 'isOptional', 'type': 'bool'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'lower_bound': {'key': 'lowerBound', 'type': 'str'},
'upper_bound': {'key': 'upperBound', 'type': 'str'},
'enum_values': {'key': 'enumValues', 'type': '[str]'},
'enum_values_to_argument_strings': {'key': 'enumValuesToArgumentStrings', 'type': '{str}'},
'description': {'key': 'description', 'type': 'str'},
'set_environment_variable': {'key': 'setEnvironmentVariable', 'type': 'bool'},
'environment_variable_override': {'key': 'environmentVariableOverride', 'type': 'str'},
'enabled_by_parameter_name': {'key': 'enabledByParameterName', 'type': 'str'},
'enabled_by_parameter_values': {'key': 'enabledByParameterValues', 'type': '[str]'},
'ui_hint': {'key': 'uiHint', 'type': 'UIParameterHint'},
'group_names': {'key': 'groupNames', 'type': '[str]'},
'argument_name': {'key': 'argumentName', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
label: Optional[str] = None,
parameter_type: Optional[Union[str, "ParameterType"]] = None,
is_optional: Optional[bool] = None,
default_value: Optional[str] = None,
lower_bound: Optional[str] = None,
upper_bound: Optional[str] = None,
enum_values: Optional[List[str]] = None,
enum_values_to_argument_strings: Optional[Dict[str, str]] = None,
description: Optional[str] = None,
set_environment_variable: Optional[bool] = None,
environment_variable_override: Optional[str] = None,
enabled_by_parameter_name: Optional[str] = None,
enabled_by_parameter_values: Optional[List[str]] = None,
ui_hint: Optional["UIParameterHint"] = None,
group_names: Optional[List[str]] = None,
argument_name: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword label:
:paramtype label: str
:keyword parameter_type: Possible values include: "Int", "Double", "Bool", "String",
"Undefined".
:paramtype parameter_type: str or ~flow.models.ParameterType
:keyword is_optional:
:paramtype is_optional: bool
:keyword default_value:
:paramtype default_value: str
:keyword lower_bound:
:paramtype lower_bound: str
:keyword upper_bound:
:paramtype upper_bound: str
:keyword enum_values:
:paramtype enum_values: list[str]
:keyword enum_values_to_argument_strings: This is a dictionary.
:paramtype enum_values_to_argument_strings: dict[str, str]
:keyword description:
:paramtype description: str
:keyword set_environment_variable:
:paramtype set_environment_variable: bool
:keyword environment_variable_override:
:paramtype environment_variable_override: str
:keyword enabled_by_parameter_name:
:paramtype enabled_by_parameter_name: str
:keyword enabled_by_parameter_values:
:paramtype enabled_by_parameter_values: list[str]
:keyword ui_hint:
:paramtype ui_hint: ~flow.models.UIParameterHint
:keyword group_names:
:paramtype group_names: list[str]
:keyword argument_name:
:paramtype argument_name: str
"""
super(StructuredInterfaceParameter, self).__init__(**kwargs)
self.name = name
self.label = label
self.parameter_type = parameter_type
self.is_optional = is_optional
self.default_value = default_value
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.enum_values = enum_values
self.enum_values_to_argument_strings = enum_values_to_argument_strings
self.description = description
self.set_environment_variable = set_environment_variable
self.environment_variable_override = environment_variable_override
self.enabled_by_parameter_name = enabled_by_parameter_name
self.enabled_by_parameter_values = enabled_by_parameter_values
self.ui_hint = ui_hint
self.group_names = group_names
self.argument_name = argument_name
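

# Illustrative sketch, hypothetical values: an enum-style String parameter whose
# enum values map to command-line argument strings through
# enum_values_to_argument_strings.
def _example_structured_interface_parameter():
    return StructuredInterfaceParameter(
        name="mode",
        parameter_type="String",           # one of the ParameterType values
        is_optional=True,
        default_value="fast",
        enum_values=["fast", "thorough"],
        enum_values_to_argument_strings={"fast": "--fast", "thorough": "--thorough"},
        argument_name="mode",
    )
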
class StudioMigrationInfo(msrest.serialization.Model):
"""StudioMigrationInfo.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar source_workspace_id:
:vartype source_workspace_id: str
:ivar source_experiment_id:
:vartype source_experiment_id: str
:ivar source_experiment_link:
:vartype source_experiment_link: str
:ivar failed_node_id_list:
:vartype failed_node_id_list: list[str]
:ivar error_message:
:vartype error_message: str
"""
_validation = {
'error_message': {'readonly': True},
}
_attribute_map = {
'source_workspace_id': {'key': 'sourceWorkspaceId', 'type': 'str'},
'source_experiment_id': {'key': 'sourceExperimentId', 'type': 'str'},
'source_experiment_link': {'key': 'sourceExperimentLink', 'type': 'str'},
'failed_node_id_list': {'key': 'failedNodeIdList', 'type': '[str]'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
}
def __init__(
self,
*,
source_workspace_id: Optional[str] = None,
source_experiment_id: Optional[str] = None,
source_experiment_link: Optional[str] = None,
failed_node_id_list: Optional[List[str]] = None,
**kwargs
):
"""
:keyword source_workspace_id:
:paramtype source_workspace_id: str
:keyword source_experiment_id:
:paramtype source_experiment_id: str
:keyword source_experiment_link:
:paramtype source_experiment_link: str
:keyword failed_node_id_list:
:paramtype failed_node_id_list: list[str]
"""
super(StudioMigrationInfo, self).__init__(**kwargs)
self.source_workspace_id = source_workspace_id
self.source_experiment_id = source_experiment_id
self.source_experiment_link = source_experiment_link
self.failed_node_id_list = failed_node_id_list
self.error_message = None
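

# Illustrative sketch: error_message is declared readonly, so it is only ever
# populated from server responses; the constructor does not accept it and
# serialize() omits readonly fields by default.
def _example_studio_migration_info():
    info = StudioMigrationInfo(
        source_workspace_id="00000000-0000-0000-0000-000000000000",  # hypothetical id
        failed_node_id_list=["node-1", "node-2"],
    )
    assert info.error_message is None      # set by the service, never by the client
    return info.serialize()
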
class SubGraphConcatenateAssignment(msrest.serialization.Model):
"""SubGraphConcatenateAssignment.
:ivar concatenate_parameter:
:vartype concatenate_parameter: list[~flow.models.ParameterAssignment]
:ivar parameter_assignments:
:vartype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
"""
_attribute_map = {
'concatenate_parameter': {'key': 'concatenateParameter', 'type': '[ParameterAssignment]'},
'parameter_assignments': {'key': 'parameterAssignments', 'type': 'SubPipelineParameterAssignment'},
}
def __init__(
self,
*,
concatenate_parameter: Optional[List["ParameterAssignment"]] = None,
parameter_assignments: Optional["SubPipelineParameterAssignment"] = None,
**kwargs
):
"""
:keyword concatenate_parameter:
:paramtype concatenate_parameter: list[~flow.models.ParameterAssignment]
:keyword parameter_assignments:
:paramtype parameter_assignments: ~flow.models.SubPipelineParameterAssignment
"""
super(SubGraphConcatenateAssignment, self).__init__(**kwargs)
self.concatenate_parameter = concatenate_parameter
self.parameter_assignments = parameter_assignments


class SubGraphConfiguration(msrest.serialization.Model):
"""SubGraphConfiguration.
:ivar graph_id:
:vartype graph_id: str
:ivar graph_draft_id:
:vartype graph_draft_id: str
:ivar default_cloud_priority:
:vartype default_cloud_priority: ~flow.models.CloudPrioritySetting
:ivar is_dynamic:
:vartype is_dynamic: bool
"""
_attribute_map = {
'graph_id': {'key': 'graphId', 'type': 'str'},
'graph_draft_id': {'key': 'graphDraftId', 'type': 'str'},
'default_cloud_priority': {'key': 'DefaultCloudPriority', 'type': 'CloudPrioritySetting'},
'is_dynamic': {'key': 'IsDynamic', 'type': 'bool'},
}
def __init__(
self,
*,
graph_id: Optional[str] = None,
graph_draft_id: Optional[str] = None,
default_cloud_priority: Optional["CloudPrioritySetting"] = None,
is_dynamic: Optional[bool] = False,
**kwargs
):
"""
:keyword graph_id:
:paramtype graph_id: str
:keyword graph_draft_id:
:paramtype graph_draft_id: str
:keyword default_cloud_priority:
:paramtype default_cloud_priority: ~flow.models.CloudPrioritySetting
:keyword is_dynamic:
:paramtype is_dynamic: bool
"""
super(SubGraphConfiguration, self).__init__(**kwargs)
self.graph_id = graph_id
self.graph_draft_id = graph_draft_id
self.default_cloud_priority = default_cloud_priority
self.is_dynamic = is_dynamic


class SubGraphConnectionInfo(msrest.serialization.Model):
"""SubGraphConnectionInfo.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
port_name: Optional[str] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
"""
super(SubGraphConnectionInfo, self).__init__(**kwargs)
self.node_id = node_id
self.port_name = port_name


class SubGraphDataPathParameterAssignment(msrest.serialization.Model):
"""SubGraphDataPathParameterAssignment.
:ivar data_set_path_parameter:
:vartype data_set_path_parameter: ~flow.models.DataSetPathParameter
:ivar data_set_path_parameter_assignments:
:vartype data_set_path_parameter_assignments: list[str]
"""
_attribute_map = {
'data_set_path_parameter': {'key': 'dataSetPathParameter', 'type': 'DataSetPathParameter'},
'data_set_path_parameter_assignments': {'key': 'dataSetPathParameterAssignments', 'type': '[str]'},
}
def __init__(
self,
*,
data_set_path_parameter: Optional["DataSetPathParameter"] = None,
data_set_path_parameter_assignments: Optional[List[str]] = None,
**kwargs
):
"""
:keyword data_set_path_parameter:
:paramtype data_set_path_parameter: ~flow.models.DataSetPathParameter
:keyword data_set_path_parameter_assignments:
:paramtype data_set_path_parameter_assignments: list[str]
"""
super(SubGraphDataPathParameterAssignment, self).__init__(**kwargs)
self.data_set_path_parameter = data_set_path_parameter
self.data_set_path_parameter_assignments = data_set_path_parameter_assignments


class SubGraphInfo(msrest.serialization.Model):
"""SubGraphInfo.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar default_compute_target:
:vartype default_compute_target: ~flow.models.ComputeSetting
:ivar default_data_store:
:vartype default_data_store: ~flow.models.DatastoreSetting
:ivar id:
:vartype id: str
:ivar parent_graph_id:
:vartype parent_graph_id: str
:ivar pipeline_definition_id:
:vartype pipeline_definition_id: str
:ivar sub_graph_parameter_assignment:
:vartype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
:ivar sub_graph_concatenate_assignment:
:vartype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
:ivar sub_graph_data_path_parameter_assignment:
:vartype sub_graph_data_path_parameter_assignment:
list[~flow.models.SubGraphDataPathParameterAssignment]
:ivar sub_graph_default_compute_target_nodes:
:vartype sub_graph_default_compute_target_nodes: list[str]
:ivar sub_graph_default_data_store_nodes:
:vartype sub_graph_default_data_store_nodes: list[str]
:ivar inputs:
:vartype inputs: list[~flow.models.SubGraphPortInfo]
:ivar outputs:
:vartype outputs: list[~flow.models.SubGraphPortInfo]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
'id': {'key': 'id', 'type': 'str'},
'parent_graph_id': {'key': 'parentGraphId', 'type': 'str'},
'pipeline_definition_id': {'key': 'pipelineDefinitionId', 'type': 'str'},
'sub_graph_parameter_assignment': {'key': 'subGraphParameterAssignment', 'type': '[SubGraphParameterAssignment]'},
'sub_graph_concatenate_assignment': {'key': 'subGraphConcatenateAssignment', 'type': '[SubGraphConcatenateAssignment]'},
'sub_graph_data_path_parameter_assignment': {'key': 'subGraphDataPathParameterAssignment', 'type': '[SubGraphDataPathParameterAssignment]'},
'sub_graph_default_compute_target_nodes': {'key': 'subGraphDefaultComputeTargetNodes', 'type': '[str]'},
'sub_graph_default_data_store_nodes': {'key': 'subGraphDefaultDataStoreNodes', 'type': '[str]'},
'inputs': {'key': 'inputs', 'type': '[SubGraphPortInfo]'},
'outputs': {'key': 'outputs', 'type': '[SubGraphPortInfo]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
default_compute_target: Optional["ComputeSetting"] = None,
default_data_store: Optional["DatastoreSetting"] = None,
id: Optional[str] = None,
parent_graph_id: Optional[str] = None,
pipeline_definition_id: Optional[str] = None,
sub_graph_parameter_assignment: Optional[List["SubGraphParameterAssignment"]] = None,
sub_graph_concatenate_assignment: Optional[List["SubGraphConcatenateAssignment"]] = None,
sub_graph_data_path_parameter_assignment: Optional[List["SubGraphDataPathParameterAssignment"]] = None,
sub_graph_default_compute_target_nodes: Optional[List[str]] = None,
sub_graph_default_data_store_nodes: Optional[List[str]] = None,
inputs: Optional[List["SubGraphPortInfo"]] = None,
outputs: Optional[List["SubGraphPortInfo"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword default_compute_target:
:paramtype default_compute_target: ~flow.models.ComputeSetting
:keyword default_data_store:
:paramtype default_data_store: ~flow.models.DatastoreSetting
:keyword id:
:paramtype id: str
:keyword parent_graph_id:
:paramtype parent_graph_id: str
:keyword pipeline_definition_id:
:paramtype pipeline_definition_id: str
:keyword sub_graph_parameter_assignment:
:paramtype sub_graph_parameter_assignment: list[~flow.models.SubGraphParameterAssignment]
:keyword sub_graph_concatenate_assignment:
:paramtype sub_graph_concatenate_assignment: list[~flow.models.SubGraphConcatenateAssignment]
:keyword sub_graph_data_path_parameter_assignment:
:paramtype sub_graph_data_path_parameter_assignment:
list[~flow.models.SubGraphDataPathParameterAssignment]
:keyword sub_graph_default_compute_target_nodes:
:paramtype sub_graph_default_compute_target_nodes: list[str]
:keyword sub_graph_default_data_store_nodes:
:paramtype sub_graph_default_data_store_nodes: list[str]
:keyword inputs:
:paramtype inputs: list[~flow.models.SubGraphPortInfo]
:keyword outputs:
:paramtype outputs: list[~flow.models.SubGraphPortInfo]
"""
super(SubGraphInfo, self).__init__(**kwargs)
self.name = name
self.description = description
self.default_compute_target = default_compute_target
self.default_data_store = default_data_store
self.id = id
self.parent_graph_id = parent_graph_id
self.pipeline_definition_id = pipeline_definition_id
self.sub_graph_parameter_assignment = sub_graph_parameter_assignment
self.sub_graph_concatenate_assignment = sub_graph_concatenate_assignment
self.sub_graph_data_path_parameter_assignment = sub_graph_data_path_parameter_assignment
self.sub_graph_default_compute_target_nodes = sub_graph_default_compute_target_nodes
self.sub_graph_default_data_store_nodes = sub_graph_default_data_store_nodes
self.inputs = inputs
self.outputs = outputs


class SubGraphParameterAssignment(msrest.serialization.Model):
"""SubGraphParameterAssignment.
:ivar parameter:
:vartype parameter: ~flow.models.Parameter
:ivar parameter_assignments:
:vartype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
"""
_attribute_map = {
'parameter': {'key': 'parameter', 'type': 'Parameter'},
'parameter_assignments': {'key': 'parameterAssignments', 'type': '[SubPipelineParameterAssignment]'},
}
def __init__(
self,
*,
parameter: Optional["Parameter"] = None,
parameter_assignments: Optional[List["SubPipelineParameterAssignment"]] = None,
**kwargs
):
"""
:keyword parameter:
:paramtype parameter: ~flow.models.Parameter
:keyword parameter_assignments:
:paramtype parameter_assignments: list[~flow.models.SubPipelineParameterAssignment]
"""
super(SubGraphParameterAssignment, self).__init__(**kwargs)
self.parameter = parameter
self.parameter_assignments = parameter_assignments


class SubGraphPortInfo(msrest.serialization.Model):
"""SubGraphPortInfo.
:ivar name:
:vartype name: str
:ivar internal:
:vartype internal: list[~flow.models.SubGraphConnectionInfo]
:ivar external:
:vartype external: list[~flow.models.SubGraphConnectionInfo]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'internal': {'key': 'internal', 'type': '[SubGraphConnectionInfo]'},
'external': {'key': 'external', 'type': '[SubGraphConnectionInfo]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
internal: Optional[List["SubGraphConnectionInfo"]] = None,
external: Optional[List["SubGraphConnectionInfo"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword internal:
:paramtype internal: list[~flow.models.SubGraphConnectionInfo]
:keyword external:
:paramtype external: list[~flow.models.SubGraphConnectionInfo]
"""
super(SubGraphPortInfo, self).__init__(**kwargs)
self.name = name
self.internal = internal
self.external = external
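

# Illustrative sketch, hypothetical node and port ids: wiring a subgraph port to
# one internal node and one external connection, then attaching it to a
# SubGraphInfo as an input.
def _example_sub_graph_port_wiring():
    port = SubGraphPortInfo(
        name="input1",
        internal=[SubGraphConnectionInfo(node_id="node-a", port_name="in")],
        external=[SubGraphConnectionInfo(node_id="parent-node", port_name="out")],
    )
    return SubGraphInfo(name="preprocess", id="subgraph-1", inputs=[port])
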
class SubmitBulkRunRequest(msrest.serialization.Model):
"""SubmitBulkRunRequest.
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_definition_resource_id:
:vartype flow_definition_resource_id: str
:ivar flow_definition_data_store_name:
:vartype flow_definition_data_store_name: str
:ivar flow_definition_blob_path:
:vartype flow_definition_blob_path: str
:ivar flow_definition_data_uri:
:vartype flow_definition_data_uri: str
:ivar run_id:
:vartype run_id: str
:ivar run_display_name:
:vartype run_display_name: str
:ivar run_experiment_name:
:vartype run_experiment_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar node_variant:
:vartype node_variant: str
:ivar variant_run_id:
:vartype variant_run_id: str
:ivar baseline_run_id:
:vartype baseline_run_id: str
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar inputs_mapping: This is a dictionary.
:vartype inputs_mapping: dict[str, str]
:ivar connections: This is a dictionary.
:vartype connections: dict[str, dict[str, str]]
:ivar environment_variables: This is a dictionary.
:vartype environment_variables: dict[str, str]
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar runtime_name:
:vartype runtime_name: str
:ivar session_id:
:vartype session_id: str
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar session_setup_mode: Possible values include: "ClientWait", "SystemWait".
:vartype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
:ivar output_data_store:
:vartype output_data_store: str
:ivar flow_lineage_id:
:vartype flow_lineage_id: str
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
_attribute_map = {
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_definition_resource_id': {'key': 'flowDefinitionResourceId', 'type': 'str'},
'flow_definition_data_store_name': {'key': 'flowDefinitionDataStoreName', 'type': 'str'},
'flow_definition_blob_path': {'key': 'flowDefinitionBlobPath', 'type': 'str'},
'flow_definition_data_uri': {'key': 'flowDefinitionDataUri', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'run_display_name': {'key': 'runDisplayName', 'type': 'str'},
'run_experiment_name': {'key': 'runExperimentName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'node_variant': {'key': 'nodeVariant', 'type': 'str'},
'variant_run_id': {'key': 'variantRunId', 'type': 'str'},
'baseline_run_id': {'key': 'baselineRunId', 'type': 'str'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'inputs_mapping': {'key': 'inputsMapping', 'type': '{str}'},
'connections': {'key': 'connections', 'type': '{{str}}'},
'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'session_setup_mode': {'key': 'sessionSetupMode', 'type': 'str'},
'output_data_store': {'key': 'outputDataStore', 'type': 'str'},
'flow_lineage_id': {'key': 'flowLineageId', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
}
def __init__(
self,
*,
flow_definition_file_path: Optional[str] = None,
flow_definition_resource_id: Optional[str] = None,
flow_definition_data_store_name: Optional[str] = None,
flow_definition_blob_path: Optional[str] = None,
flow_definition_data_uri: Optional[str] = None,
run_id: Optional[str] = None,
run_display_name: Optional[str] = None,
run_experiment_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
properties: Optional[Dict[str, str]] = None,
node_variant: Optional[str] = None,
variant_run_id: Optional[str] = None,
baseline_run_id: Optional[str] = None,
batch_data_input: Optional["BatchDataInput"] = None,
inputs_mapping: Optional[Dict[str, str]] = None,
connections: Optional[Dict[str, Dict[str, str]]] = None,
environment_variables: Optional[Dict[str, str]] = None,
aml_compute_name: Optional[str] = None,
runtime_name: Optional[str] = None,
session_id: Optional[str] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
session_setup_mode: Optional[Union[str, "SessionSetupModeEnum"]] = None,
output_data_store: Optional[str] = None,
flow_lineage_id: Optional[str] = None,
run_display_name_generation_type: Optional[Union[str, "RunDisplayNameGenerationType"]] = None,
**kwargs
):
"""
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_definition_resource_id:
:paramtype flow_definition_resource_id: str
:keyword flow_definition_data_store_name:
:paramtype flow_definition_data_store_name: str
:keyword flow_definition_blob_path:
:paramtype flow_definition_blob_path: str
:keyword flow_definition_data_uri:
:paramtype flow_definition_data_uri: str
:keyword run_id:
:paramtype run_id: str
:keyword run_display_name:
:paramtype run_display_name: str
:keyword run_experiment_name:
:paramtype run_experiment_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword node_variant:
:paramtype node_variant: str
:keyword variant_run_id:
:paramtype variant_run_id: str
:keyword baseline_run_id:
:paramtype baseline_run_id: str
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword inputs_mapping: This is a dictionary.
:paramtype inputs_mapping: dict[str, str]
:keyword connections: This is a dictionary.
:paramtype connections: dict[str, dict[str, str]]
:keyword environment_variables: This is a dictionary.
:paramtype environment_variables: dict[str, str]
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword runtime_name:
:paramtype runtime_name: str
:keyword session_id:
:paramtype session_id: str
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword session_setup_mode: Possible values include: "ClientWait", "SystemWait".
:paramtype session_setup_mode: str or ~flow.models.SessionSetupModeEnum
:keyword output_data_store:
:paramtype output_data_store: str
:keyword flow_lineage_id:
:paramtype flow_lineage_id: str
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
super(SubmitBulkRunRequest, self).__init__(**kwargs)
self.flow_definition_file_path = flow_definition_file_path
self.flow_definition_resource_id = flow_definition_resource_id
self.flow_definition_data_store_name = flow_definition_data_store_name
self.flow_definition_blob_path = flow_definition_blob_path
self.flow_definition_data_uri = flow_definition_data_uri
self.run_id = run_id
self.run_display_name = run_display_name
self.run_experiment_name = run_experiment_name
self.description = description
self.tags = tags
self.properties = properties
self.node_variant = node_variant
self.variant_run_id = variant_run_id
self.baseline_run_id = baseline_run_id
self.batch_data_input = batch_data_input
self.inputs_mapping = inputs_mapping
self.connections = connections
self.environment_variables = environment_variables
self.aml_compute_name = aml_compute_name
self.runtime_name = runtime_name
self.session_id = session_id
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.session_setup_mode = session_setup_mode
self.output_data_store = output_data_store
self.flow_lineage_id = flow_lineage_id
self.run_display_name_generation_type = run_display_name_generation_type
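

# Illustrative sketch of a bulk-run submission; every value below is a
# hypothetical placeholder (URI, run id, connection and runtime names). The
# "${data.<column>}" form in inputs_mapping follows the column-mapping syntax
# used for batch inputs.
def _example_submit_bulk_run_request():
    return SubmitBulkRunRequest(
        flow_definition_data_uri="azureml://example/flows/my-flow",  # hypothetical
        run_id="bulk-run-001",
        run_display_name="my bulk run",
        inputs_mapping={"question": "${data.question}"},
        connections={"llm_node": {"connection": "my_aoai_connection"}},
        runtime_name="my-runtime",
        session_setup_mode="SystemWait",   # or "ClientWait"
    )
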
class SubmitBulkRunResponse(msrest.serialization.Model):
"""SubmitBulkRunResponse.
:ivar next_action_interval_in_seconds:
:vartype next_action_interval_in_seconds: int
:ivar action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:vartype action_type: str or ~flow.models.ActionType
:ivar flow_runs:
:vartype flow_runs: list[any]
:ivar node_runs:
:vartype node_runs: list[any]
:ivar error_response: The error response.
:vartype error_response: ~flow.models.ErrorResponse
:ivar flow_name:
:vartype flow_name: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_graph:
:vartype flow_graph: ~flow.models.FlowGraph
:ivar flow_graph_layout:
:vartype flow_graph_layout: ~flow.models.FlowGraphLayout
:ivar flow_run_resource_id:
:vartype flow_run_resource_id: str
:ivar bulk_test_id:
:vartype bulk_test_id: str
:ivar batch_inputs:
:vartype batch_inputs: list[dict[str, any]]
:ivar batch_data_input:
:vartype batch_data_input: ~flow.models.BatchDataInput
:ivar created_by:
:vartype created_by: ~flow.models.SchemaContractsCreatedBy
:ivar created_on:
:vartype created_on: ~datetime.datetime
:ivar flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:vartype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar runtime_name:
:vartype runtime_name: str
:ivar aml_compute_name:
:vartype aml_compute_name: str
:ivar flow_run_logs: Dictionary of :code:`<string>`.
:vartype flow_run_logs: dict[str, str]
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar working_directory:
:vartype working_directory: str
:ivar flow_dag_file_relative_path:
:vartype flow_dag_file_relative_path: str
:ivar flow_snapshot_id:
:vartype flow_snapshot_id: str
    :ivar variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each variant
     run id to the list of evaluation run ids produced for that variant.
    :vartype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
_attribute_map = {
'next_action_interval_in_seconds': {'key': 'nextActionIntervalInSeconds', 'type': 'int'},
'action_type': {'key': 'actionType', 'type': 'str'},
'flow_runs': {'key': 'flow_runs', 'type': '[object]'},
'node_runs': {'key': 'node_runs', 'type': '[object]'},
'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_graph': {'key': 'flowGraph', 'type': 'FlowGraph'},
'flow_graph_layout': {'key': 'flowGraphLayout', 'type': 'FlowGraphLayout'},
'flow_run_resource_id': {'key': 'flowRunResourceId', 'type': 'str'},
'bulk_test_id': {'key': 'bulkTestId', 'type': 'str'},
'batch_inputs': {'key': 'batchInputs', 'type': '[{object}]'},
'batch_data_input': {'key': 'batchDataInput', 'type': 'BatchDataInput'},
'created_by': {'key': 'createdBy', 'type': 'SchemaContractsCreatedBy'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'flow_run_type': {'key': 'flowRunType', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'runtime_name': {'key': 'runtimeName', 'type': 'str'},
'aml_compute_name': {'key': 'amlComputeName', 'type': 'str'},
'flow_run_logs': {'key': 'flowRunLogs', 'type': '{str}'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'working_directory': {'key': 'workingDirectory', 'type': 'str'},
'flow_dag_file_relative_path': {'key': 'flowDagFileRelativePath', 'type': 'str'},
'flow_snapshot_id': {'key': 'flowSnapshotId', 'type': 'str'},
'variant_run_to_evaluation_runs_id_mapping': {'key': 'variantRunToEvaluationRunsIdMapping', 'type': '{[str]}'},
}
def __init__(
self,
*,
next_action_interval_in_seconds: Optional[int] = None,
action_type: Optional[Union[str, "ActionType"]] = None,
flow_runs: Optional[List[Any]] = None,
node_runs: Optional[List[Any]] = None,
error_response: Optional["ErrorResponse"] = None,
flow_name: Optional[str] = None,
flow_run_display_name: Optional[str] = None,
flow_run_id: Optional[str] = None,
flow_graph: Optional["FlowGraph"] = None,
flow_graph_layout: Optional["FlowGraphLayout"] = None,
flow_run_resource_id: Optional[str] = None,
bulk_test_id: Optional[str] = None,
batch_inputs: Optional[List[Dict[str, Any]]] = None,
batch_data_input: Optional["BatchDataInput"] = None,
created_by: Optional["SchemaContractsCreatedBy"] = None,
created_on: Optional[datetime.datetime] = None,
flow_run_type: Optional[Union[str, "FlowRunTypeEnum"]] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
runtime_name: Optional[str] = None,
aml_compute_name: Optional[str] = None,
flow_run_logs: Optional[Dict[str, str]] = None,
flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None,
flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None,
working_directory: Optional[str] = None,
flow_dag_file_relative_path: Optional[str] = None,
flow_snapshot_id: Optional[str] = None,
variant_run_to_evaluation_runs_id_mapping: Optional[Dict[str, List[str]]] = None,
**kwargs
):
"""
:keyword next_action_interval_in_seconds:
:paramtype next_action_interval_in_seconds: int
:keyword action_type: Possible values include: "SendValidationRequest", "GetValidationStatus",
"SubmitBulkRun", "LogRunResult", "LogRunTerminatedEvent".
:paramtype action_type: str or ~flow.models.ActionType
:keyword flow_runs:
:paramtype flow_runs: list[any]
:keyword node_runs:
:paramtype node_runs: list[any]
:keyword error_response: The error response.
:paramtype error_response: ~flow.models.ErrorResponse
:keyword flow_name:
:paramtype flow_name: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_graph:
:paramtype flow_graph: ~flow.models.FlowGraph
:keyword flow_graph_layout:
:paramtype flow_graph_layout: ~flow.models.FlowGraphLayout
:keyword flow_run_resource_id:
:paramtype flow_run_resource_id: str
:keyword bulk_test_id:
:paramtype bulk_test_id: str
:keyword batch_inputs:
:paramtype batch_inputs: list[dict[str, any]]
:keyword batch_data_input:
:paramtype batch_data_input: ~flow.models.BatchDataInput
:keyword created_by:
:paramtype created_by: ~flow.models.SchemaContractsCreatedBy
:keyword created_on:
:paramtype created_on: ~datetime.datetime
:keyword flow_run_type: Possible values include: "FlowRun", "EvaluationRun",
"PairwiseEvaluationRun", "SingleNodeRun", "FromNodeRun".
:paramtype flow_run_type: str or ~flow.models.FlowRunTypeEnum
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword runtime_name:
:paramtype runtime_name: str
:keyword aml_compute_name:
:paramtype aml_compute_name: str
:keyword flow_run_logs: Dictionary of :code:`<string>`.
:paramtype flow_run_logs: dict[str, str]
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword working_directory:
:paramtype working_directory: str
:keyword flow_dag_file_relative_path:
:paramtype flow_dag_file_relative_path: str
:keyword flow_snapshot_id:
:paramtype flow_snapshot_id: str
        :keyword variant_run_to_evaluation_runs_id_mapping: This is a dictionary mapping each
         variant run id to the list of evaluation run ids produced for that variant.
        :paramtype variant_run_to_evaluation_runs_id_mapping: dict[str, list[str]]
"""
super(SubmitBulkRunResponse, self).__init__(**kwargs)
self.next_action_interval_in_seconds = next_action_interval_in_seconds
self.action_type = action_type
self.flow_runs = flow_runs
self.node_runs = node_runs
self.error_response = error_response
self.flow_name = flow_name
self.flow_run_display_name = flow_run_display_name
self.flow_run_id = flow_run_id
self.flow_graph = flow_graph
self.flow_graph_layout = flow_graph_layout
self.flow_run_resource_id = flow_run_resource_id
self.bulk_test_id = bulk_test_id
self.batch_inputs = batch_inputs
self.batch_data_input = batch_data_input
self.created_by = created_by
self.created_on = created_on
self.flow_run_type = flow_run_type
self.flow_type = flow_type
self.runtime_name = runtime_name
self.aml_compute_name = aml_compute_name
self.flow_run_logs = flow_run_logs
self.flow_test_mode = flow_test_mode
self.flow_test_infos = flow_test_infos
self.working_directory = working_directory
self.flow_dag_file_relative_path = flow_dag_file_relative_path
self.flow_snapshot_id = flow_snapshot_id
self.variant_run_to_evaluation_runs_id_mapping = variant_run_to_evaluation_runs_id_mapping
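

# Illustrative sketch: Model.deserialize() consumes a REST payload keyed by the
# wire names in _attribute_map and returns a typed response. The payload below
# is a hypothetical, minimal server reply.
def _example_submit_bulk_run_response():
    payload = {
        "actionType": "SubmitBulkRun",
        "flowRunId": "bulk-run-001",
        "nextActionIntervalInSeconds": 30,
    }
    response = SubmitBulkRunResponse.deserialize(payload)
    return response.flow_run_id            # "bulk-run-001"
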
class SubmitFlowRequest(msrest.serialization.Model):
"""SubmitFlowRequest.
:ivar flow_run_id:
:vartype flow_run_id: str
:ivar flow_run_display_name:
:vartype flow_run_display_name: str
:ivar flow_id:
:vartype flow_id: str
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_submit_run_settings:
:vartype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:ivar async_submission:
:vartype async_submission: bool
:ivar use_workspace_connection:
:vartype use_workspace_connection: bool
:ivar use_flow_snapshot_to_submit:
:vartype use_flow_snapshot_to_submit: bool
:ivar enable_blob_run_artifacts:
:vartype enable_blob_run_artifacts: bool
:ivar enable_async_flow_test:
:vartype enable_async_flow_test: bool
:ivar flow_runtime_submission_api_version: Possible values include: "Version1", "Version2".
:vartype flow_runtime_submission_api_version: str or
~flow.models.FlowRuntimeSubmissionApiVersion
:ivar run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:vartype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
_attribute_map = {
'flow_run_id': {'key': 'flowRunId', 'type': 'str'},
'flow_run_display_name': {'key': 'flowRunDisplayName', 'type': 'str'},
'flow_id': {'key': 'flowId', 'type': 'str'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_submit_run_settings': {'key': 'flowSubmitRunSettings', 'type': 'FlowSubmitRunSettings'},
'async_submission': {'key': 'asyncSubmission', 'type': 'bool'},
'use_workspace_connection': {'key': 'useWorkspaceConnection', 'type': 'bool'},
'use_flow_snapshot_to_submit': {'key': 'useFlowSnapshotToSubmit', 'type': 'bool'},
'enable_blob_run_artifacts': {'key': 'enableBlobRunArtifacts', 'type': 'bool'},
'enable_async_flow_test': {'key': 'enableAsyncFlowTest', 'type': 'bool'},
'flow_runtime_submission_api_version': {'key': 'flowRuntimeSubmissionApiVersion', 'type': 'str'},
'run_display_name_generation_type': {'key': 'runDisplayNameGenerationType', 'type': 'str'},
}
def __init__(
self,
*,
flow_run_id: Optional[str] = None,
flow_run_display_name: Optional[str] = None,
flow_id: Optional[str] = None,
flow: Optional["Flow"] = None,
flow_submit_run_settings: Optional["FlowSubmitRunSettings"] = None,
async_submission: Optional[bool] = None,
use_workspace_connection: Optional[bool] = None,
use_flow_snapshot_to_submit: Optional[bool] = None,
enable_blob_run_artifacts: Optional[bool] = None,
enable_async_flow_test: Optional[bool] = None,
flow_runtime_submission_api_version: Optional[Union[str, "FlowRuntimeSubmissionApiVersion"]] = None,
run_display_name_generation_type: Optional[Union[str, "RunDisplayNameGenerationType"]] = None,
**kwargs
):
"""
:keyword flow_run_id:
:paramtype flow_run_id: str
:keyword flow_run_display_name:
:paramtype flow_run_display_name: str
:keyword flow_id:
:paramtype flow_id: str
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_submit_run_settings:
:paramtype flow_submit_run_settings: ~flow.models.FlowSubmitRunSettings
:keyword async_submission:
:paramtype async_submission: bool
:keyword use_workspace_connection:
:paramtype use_workspace_connection: bool
:keyword use_flow_snapshot_to_submit:
:paramtype use_flow_snapshot_to_submit: bool
:keyword enable_blob_run_artifacts:
:paramtype enable_blob_run_artifacts: bool
:keyword enable_async_flow_test:
:paramtype enable_async_flow_test: bool
:keyword flow_runtime_submission_api_version: Possible values include: "Version1", "Version2".
:paramtype flow_runtime_submission_api_version: str or
~flow.models.FlowRuntimeSubmissionApiVersion
:keyword run_display_name_generation_type: Possible values include: "AutoAppend",
"UserProvidedMacro".
:paramtype run_display_name_generation_type: str or ~flow.models.RunDisplayNameGenerationType
"""
super(SubmitFlowRequest, self).__init__(**kwargs)
self.flow_run_id = flow_run_id
self.flow_run_display_name = flow_run_display_name
self.flow_id = flow_id
self.flow = flow
self.flow_submit_run_settings = flow_submit_run_settings
self.async_submission = async_submission
self.use_workspace_connection = use_workspace_connection
self.use_flow_snapshot_to_submit = use_flow_snapshot_to_submit
self.enable_blob_run_artifacts = enable_blob_run_artifacts
self.enable_async_flow_test = enable_async_flow_test
self.flow_runtime_submission_api_version = flow_runtime_submission_api_version
self.run_display_name_generation_type = run_display_name_generation_type
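

# Illustrative sketch, hypothetical ids: an asynchronous flow submission that
# reuses the stored flow snapshot and targets the V2 runtime submission API.
def _example_submit_flow_request():
    return SubmitFlowRequest(
        flow_run_id="flow-run-001",
        flow_id="my-flow-id",
        async_submission=True,
        use_flow_snapshot_to_submit=True,
        flow_runtime_submission_api_version="Version2",
    )
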
class SubmitPipelineRunRequest(msrest.serialization.Model):
"""SubmitPipelineRunRequest.
:ivar compute_target:
:vartype compute_target: str
:ivar flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:vartype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:ivar step_tags: This is a dictionary.
:vartype step_tags: dict[str, str]
:ivar experiment_name:
:vartype experiment_name: str
:ivar pipeline_parameters: This is a dictionary.
:vartype pipeline_parameters: dict[str, str]
:ivar data_path_assignments: This is a dictionary.
:vartype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:ivar data_set_definition_value_assignments: This is a dictionary.
:vartype data_set_definition_value_assignments: dict[str, ~flow.models.DataSetDefinitionValue]
:ivar asset_output_settings_assignments: This is a dictionary.
:vartype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:ivar enable_notification:
:vartype enable_notification: bool
:ivar sub_pipelines_info:
:vartype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:ivar display_name:
:vartype display_name: str
:ivar run_id:
:vartype run_id: str
:ivar parent_run_id:
:vartype parent_run_id: str
:ivar graph:
:vartype graph: ~flow.models.GraphDraftEntity
:ivar pipeline_run_settings:
:vartype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:ivar module_node_run_settings:
:vartype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:ivar module_node_ui_input_settings:
:vartype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, str]
:ivar continue_run_on_step_failure:
:vartype continue_run_on_step_failure: bool
:ivar description:
:vartype description: str
:ivar properties: This is a dictionary.
:vartype properties: dict[str, str]
:ivar enforce_rerun:
:vartype enforce_rerun: bool
:ivar dataset_access_modes: Possible values include: "Default", "DatasetInDpv2", "AssetInDpv2",
"DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:vartype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
_attribute_map = {
'compute_target': {'key': 'computeTarget', 'type': 'str'},
'flattened_sub_graphs': {'key': 'flattenedSubGraphs', 'type': '{PipelineSubDraft}'},
'step_tags': {'key': 'stepTags', 'type': '{str}'},
'experiment_name': {'key': 'experimentName', 'type': 'str'},
'pipeline_parameters': {'key': 'pipelineParameters', 'type': '{str}'},
'data_path_assignments': {'key': 'dataPathAssignments', 'type': '{LegacyDataPath}'},
'data_set_definition_value_assignments': {'key': 'dataSetDefinitionValueAssignments', 'type': '{DataSetDefinitionValue}'},
'asset_output_settings_assignments': {'key': 'assetOutputSettingsAssignments', 'type': '{AssetOutputSettings}'},
'enable_notification': {'key': 'enableNotification', 'type': 'bool'},
'sub_pipelines_info': {'key': 'subPipelinesInfo', 'type': 'SubPipelinesInfo'},
'display_name': {'key': 'displayName', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
'graph': {'key': 'graph', 'type': 'GraphDraftEntity'},
'pipeline_run_settings': {'key': 'pipelineRunSettings', 'type': '[RunSettingParameterAssignment]'},
'module_node_run_settings': {'key': 'moduleNodeRunSettings', 'type': '[GraphModuleNodeRunSetting]'},
'module_node_ui_input_settings': {'key': 'moduleNodeUIInputSettings', 'type': '[GraphModuleNodeUIInputSetting]'},
'tags': {'key': 'tags', 'type': '{str}'},
'continue_run_on_step_failure': {'key': 'continueRunOnStepFailure', 'type': 'bool'},
'description': {'key': 'description', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
'enforce_rerun': {'key': 'enforceRerun', 'type': 'bool'},
'dataset_access_modes': {'key': 'datasetAccessModes', 'type': 'str'},
}
def __init__(
self,
*,
compute_target: Optional[str] = None,
flattened_sub_graphs: Optional[Dict[str, "PipelineSubDraft"]] = None,
step_tags: Optional[Dict[str, str]] = None,
experiment_name: Optional[str] = None,
pipeline_parameters: Optional[Dict[str, str]] = None,
data_path_assignments: Optional[Dict[str, "LegacyDataPath"]] = None,
data_set_definition_value_assignments: Optional[Dict[str, "DataSetDefinitionValue"]] = None,
asset_output_settings_assignments: Optional[Dict[str, "AssetOutputSettings"]] = None,
enable_notification: Optional[bool] = None,
sub_pipelines_info: Optional["SubPipelinesInfo"] = None,
display_name: Optional[str] = None,
run_id: Optional[str] = None,
parent_run_id: Optional[str] = None,
graph: Optional["GraphDraftEntity"] = None,
pipeline_run_settings: Optional[List["RunSettingParameterAssignment"]] = None,
module_node_run_settings: Optional[List["GraphModuleNodeRunSetting"]] = None,
module_node_ui_input_settings: Optional[List["GraphModuleNodeUIInputSetting"]] = None,
tags: Optional[Dict[str, str]] = None,
continue_run_on_step_failure: Optional[bool] = None,
description: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
enforce_rerun: Optional[bool] = None,
dataset_access_modes: Optional[Union[str, "DatasetAccessModes"]] = None,
**kwargs
):
"""
:keyword compute_target:
:paramtype compute_target: str
:keyword flattened_sub_graphs: Dictionary of :code:`<PipelineSubDraft>`.
:paramtype flattened_sub_graphs: dict[str, ~flow.models.PipelineSubDraft]
:keyword step_tags: This is a dictionary.
:paramtype step_tags: dict[str, str]
:keyword experiment_name:
:paramtype experiment_name: str
:keyword pipeline_parameters: This is a dictionary.
:paramtype pipeline_parameters: dict[str, str]
:keyword data_path_assignments: This is a dictionary.
:paramtype data_path_assignments: dict[str, ~flow.models.LegacyDataPath]
:keyword data_set_definition_value_assignments: This is a dictionary.
:paramtype data_set_definition_value_assignments: dict[str,
~flow.models.DataSetDefinitionValue]
:keyword asset_output_settings_assignments: This is a dictionary.
:paramtype asset_output_settings_assignments: dict[str, ~flow.models.AssetOutputSettings]
:keyword enable_notification:
:paramtype enable_notification: bool
:keyword sub_pipelines_info:
:paramtype sub_pipelines_info: ~flow.models.SubPipelinesInfo
:keyword display_name:
:paramtype display_name: str
:keyword run_id:
:paramtype run_id: str
:keyword parent_run_id:
:paramtype parent_run_id: str
:keyword graph:
:paramtype graph: ~flow.models.GraphDraftEntity
:keyword pipeline_run_settings:
:paramtype pipeline_run_settings: list[~flow.models.RunSettingParameterAssignment]
:keyword module_node_run_settings:
:paramtype module_node_run_settings: list[~flow.models.GraphModuleNodeRunSetting]
:keyword module_node_ui_input_settings:
:paramtype module_node_ui_input_settings: list[~flow.models.GraphModuleNodeUIInputSetting]
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, str]
:keyword continue_run_on_step_failure:
:paramtype continue_run_on_step_failure: bool
:keyword description:
:paramtype description: str
:keyword properties: This is a dictionary.
:paramtype properties: dict[str, str]
:keyword enforce_rerun:
:paramtype enforce_rerun: bool
:keyword dataset_access_modes: Possible values include: "Default", "DatasetInDpv2",
"AssetInDpv2", "DatasetInDesignerUI", "DatasetInDpv2WithDatasetInDesignerUI", "Dataset",
"AssetInDpv2WithDatasetInDesignerUI", "DatasetAndAssetInDpv2WithDatasetInDesignerUI",
"AssetInDesignerUI", "AssetInDpv2WithAssetInDesignerUI", "Asset".
:paramtype dataset_access_modes: str or ~flow.models.DatasetAccessModes
"""
super(SubmitPipelineRunRequest, self).__init__(**kwargs)
self.compute_target = compute_target
self.flattened_sub_graphs = flattened_sub_graphs
self.step_tags = step_tags
self.experiment_name = experiment_name
self.pipeline_parameters = pipeline_parameters
self.data_path_assignments = data_path_assignments
self.data_set_definition_value_assignments = data_set_definition_value_assignments
self.asset_output_settings_assignments = asset_output_settings_assignments
self.enable_notification = enable_notification
self.sub_pipelines_info = sub_pipelines_info
self.display_name = display_name
self.run_id = run_id
self.parent_run_id = parent_run_id
self.graph = graph
self.pipeline_run_settings = pipeline_run_settings
self.module_node_run_settings = module_node_run_settings
self.module_node_ui_input_settings = module_node_ui_input_settings
self.tags = tags
self.continue_run_on_step_failure = continue_run_on_step_failure
self.description = description
self.properties = properties
self.enforce_rerun = enforce_rerun
self.dataset_access_modes = dataset_access_modes
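

# Illustrative sketch, hypothetical names: a minimal pipeline submission.
# serialize() shows the snake_case-to-camelCase key mapping from _attribute_map
# (e.g. compute_target -> computeTarget).
def _example_submit_pipeline_run_request():
    request = SubmitPipelineRunRequest(
        compute_target="cpu-cluster",
        experiment_name="my-experiment",
        pipeline_parameters={"learning_rate": "0.01"},
        continue_run_on_step_failure=False,
        dataset_access_modes="Default",
    )
    return request.serialize()
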
class SubPipelineDefinition(msrest.serialization.Model):
"""SubPipelineDefinition.
:ivar name:
:vartype name: str
:ivar description:
:vartype description: str
:ivar default_compute_target:
:vartype default_compute_target: ~flow.models.ComputeSetting
:ivar default_data_store:
:vartype default_data_store: ~flow.models.DatastoreSetting
:ivar pipeline_function_name:
:vartype pipeline_function_name: str
:ivar id:
:vartype id: str
:ivar parent_definition_id:
:vartype parent_definition_id: str
:ivar from_module_name:
:vartype from_module_name: str
:ivar parameter_list:
:vartype parameter_list: list[~flow.models.Kwarg]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'default_compute_target': {'key': 'defaultComputeTarget', 'type': 'ComputeSetting'},
'default_data_store': {'key': 'defaultDataStore', 'type': 'DatastoreSetting'},
'pipeline_function_name': {'key': 'pipelineFunctionName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'parent_definition_id': {'key': 'parentDefinitionId', 'type': 'str'},
'from_module_name': {'key': 'fromModuleName', 'type': 'str'},
'parameter_list': {'key': 'parameterList', 'type': '[Kwarg]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
default_compute_target: Optional["ComputeSetting"] = None,
default_data_store: Optional["DatastoreSetting"] = None,
pipeline_function_name: Optional[str] = None,
id: Optional[str] = None,
parent_definition_id: Optional[str] = None,
from_module_name: Optional[str] = None,
parameter_list: Optional[List["Kwarg"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword description:
:paramtype description: str
:keyword default_compute_target:
:paramtype default_compute_target: ~flow.models.ComputeSetting
:keyword default_data_store:
:paramtype default_data_store: ~flow.models.DatastoreSetting
:keyword pipeline_function_name:
:paramtype pipeline_function_name: str
:keyword id:
:paramtype id: str
:keyword parent_definition_id:
:paramtype parent_definition_id: str
:keyword from_module_name:
:paramtype from_module_name: str
:keyword parameter_list:
:paramtype parameter_list: list[~flow.models.Kwarg]
"""
super(SubPipelineDefinition, self).__init__(**kwargs)
self.name = name
self.description = description
self.default_compute_target = default_compute_target
self.default_data_store = default_data_store
self.pipeline_function_name = pipeline_function_name
self.id = id
self.parent_definition_id = parent_definition_id
self.from_module_name = from_module_name
self.parameter_list = parameter_list


class SubPipelineParameterAssignment(msrest.serialization.Model):
"""SubPipelineParameterAssignment.
:ivar node_id:
:vartype node_id: str
:ivar parameter_name:
:vartype parameter_name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
parameter_name: Optional[str] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword parameter_name:
:paramtype parameter_name: str
"""
super(SubPipelineParameterAssignment, self).__init__(**kwargs)
self.node_id = node_id
self.parameter_name = parameter_name


class SubPipelinesInfo(msrest.serialization.Model):
"""SubPipelinesInfo.
:ivar sub_graph_info:
:vartype sub_graph_info: list[~flow.models.SubGraphInfo]
:ivar node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:vartype node_id_to_sub_graph_id_mapping: dict[str, str]
:ivar sub_pipeline_definition:
:vartype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
"""
_attribute_map = {
'sub_graph_info': {'key': 'subGraphInfo', 'type': '[SubGraphInfo]'},
'node_id_to_sub_graph_id_mapping': {'key': 'nodeIdToSubGraphIdMapping', 'type': '{str}'},
'sub_pipeline_definition': {'key': 'subPipelineDefinition', 'type': '[SubPipelineDefinition]'},
}
def __init__(
self,
*,
sub_graph_info: Optional[List["SubGraphInfo"]] = None,
node_id_to_sub_graph_id_mapping: Optional[Dict[str, str]] = None,
sub_pipeline_definition: Optional[List["SubPipelineDefinition"]] = None,
**kwargs
):
"""
:keyword sub_graph_info:
:paramtype sub_graph_info: list[~flow.models.SubGraphInfo]
:keyword node_id_to_sub_graph_id_mapping: Dictionary of :code:`<string>`.
:paramtype node_id_to_sub_graph_id_mapping: dict[str, str]
:keyword sub_pipeline_definition:
:paramtype sub_pipeline_definition: list[~flow.models.SubPipelineDefinition]
"""
super(SubPipelinesInfo, self).__init__(**kwargs)
self.sub_graph_info = sub_graph_info
self.node_id_to_sub_graph_id_mapping = node_id_to_sub_graph_id_mapping
self.sub_pipeline_definition = sub_pipeline_definition
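# Usage sketch (illustrative, not part of the generated surface): assembling a
# SubPipelinesInfo payload by hand from the two models above. The identifiers
# are placeholders, not values any service is known to return.
def _example_sub_pipelines_info() -> "SubPipelinesInfo":
    definition = SubPipelineDefinition(
        name="train_subgraph",
        pipeline_function_name="train",
        id="def-001",
        parameter_list=[],  # would hold Kwarg models when parameters exist
    )
    return SubPipelinesInfo(
        node_id_to_sub_graph_id_mapping={"node-1": "graph-a"},
        sub_pipeline_definition=[definition],
    )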
class SubStatusPeriod(msrest.serialization.Model):
"""SubStatusPeriod.
:ivar name:
:vartype name: str
:ivar sub_periods:
:vartype sub_periods: list[~flow.models.SubStatusPeriod]
:ivar start:
:vartype start: long
:ivar end:
:vartype end: long
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'sub_periods': {'key': 'subPeriods', 'type': '[SubStatusPeriod]'},
'start': {'key': 'start', 'type': 'long'},
'end': {'key': 'end', 'type': 'long'},
}
def __init__(
self,
*,
name: Optional[str] = None,
sub_periods: Optional[List["SubStatusPeriod"]] = None,
start: Optional[int] = None,
end: Optional[int] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword sub_periods:
:paramtype sub_periods: list[~flow.models.SubStatusPeriod]
:keyword start:
:paramtype start: long
:keyword end:
:paramtype end: long
"""
super(SubStatusPeriod, self).__init__(**kwargs)
self.name = name
self.sub_periods = sub_periods
self.start = start
self.end = end
class SweepEarlyTerminationPolicy(msrest.serialization.Model):
"""SweepEarlyTerminationPolicy.
:ivar policy_type: Possible values include: "Bandit", "MedianStopping", "TruncationSelection".
:vartype policy_type: str or ~flow.models.EarlyTerminationPolicyType
:ivar evaluation_interval:
:vartype evaluation_interval: int
:ivar delay_evaluation:
:vartype delay_evaluation: int
:ivar slack_factor:
:vartype slack_factor: float
:ivar slack_amount:
:vartype slack_amount: float
:ivar truncation_percentage:
:vartype truncation_percentage: int
"""
_attribute_map = {
'policy_type': {'key': 'policyType', 'type': 'str'},
'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
'slack_factor': {'key': 'slackFactor', 'type': 'float'},
'slack_amount': {'key': 'slackAmount', 'type': 'float'},
'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
}
def __init__(
self,
*,
policy_type: Optional[Union[str, "EarlyTerminationPolicyType"]] = None,
evaluation_interval: Optional[int] = None,
delay_evaluation: Optional[int] = None,
slack_factor: Optional[float] = None,
slack_amount: Optional[float] = None,
truncation_percentage: Optional[int] = None,
**kwargs
):
"""
:keyword policy_type: Possible values include: "Bandit", "MedianStopping",
"TruncationSelection".
:paramtype policy_type: str or ~flow.models.EarlyTerminationPolicyType
:keyword evaluation_interval:
:paramtype evaluation_interval: int
:keyword delay_evaluation:
:paramtype delay_evaluation: int
:keyword slack_factor:
:paramtype slack_factor: float
:keyword slack_amount:
:paramtype slack_amount: float
:keyword truncation_percentage:
:paramtype truncation_percentage: int
"""
super(SweepEarlyTerminationPolicy, self).__init__(**kwargs)
self.policy_type = policy_type
self.evaluation_interval = evaluation_interval
self.delay_evaluation = delay_evaluation
self.slack_factor = slack_factor
self.slack_amount = slack_amount
self.truncation_percentage = truncation_percentage
class SweepSettings(msrest.serialization.Model):
"""SweepSettings.
:ivar limits:
:vartype limits: ~flow.models.SweepSettingsLimits
:ivar search_space:
:vartype search_space: list[dict[str, str]]
:ivar sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:vartype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
:ivar early_termination:
:vartype early_termination: ~flow.models.SweepEarlyTerminationPolicy
"""
_attribute_map = {
'limits': {'key': 'limits', 'type': 'SweepSettingsLimits'},
'search_space': {'key': 'searchSpace', 'type': '[{str}]'},
'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'},
'early_termination': {'key': 'earlyTermination', 'type': 'SweepEarlyTerminationPolicy'},
}
def __init__(
self,
*,
limits: Optional["SweepSettingsLimits"] = None,
search_space: Optional[List[Dict[str, str]]] = None,
sampling_algorithm: Optional[Union[str, "SamplingAlgorithmType"]] = None,
early_termination: Optional["SweepEarlyTerminationPolicy"] = None,
**kwargs
):
"""
:keyword limits:
:paramtype limits: ~flow.models.SweepSettingsLimits
:keyword search_space:
:paramtype search_space: list[dict[str, str]]
:keyword sampling_algorithm: Possible values include: "Random", "Grid", "Bayesian".
:paramtype sampling_algorithm: str or ~flow.models.SamplingAlgorithmType
:keyword early_termination:
:paramtype early_termination: ~flow.models.SweepEarlyTerminationPolicy
"""
super(SweepSettings, self).__init__(**kwargs)
self.limits = limits
self.search_space = search_space
self.sampling_algorithm = sampling_algorithm
self.early_termination = early_termination
class SweepSettingsLimits(msrest.serialization.Model):
"""SweepSettingsLimits.
:ivar max_total_trials:
:vartype max_total_trials: int
:ivar max_concurrent_trials:
:vartype max_concurrent_trials: int
"""
_attribute_map = {
'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
}
def __init__(
self,
*,
max_total_trials: Optional[int] = None,
max_concurrent_trials: Optional[int] = None,
**kwargs
):
"""
:keyword max_total_trials:
:paramtype max_total_trials: int
:keyword max_concurrent_trials:
:paramtype max_concurrent_trials: int
"""
super(SweepSettingsLimits, self).__init__(**kwargs)
self.max_total_trials = max_total_trials
self.max_concurrent_trials = max_concurrent_trials
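# Usage sketch: a sweep configuration assembled from the three models above.
# The enum literals ("Random", "Bandit") mirror the docstrings; the numbers and
# the search-space expression are placeholders.
def _example_sweep_settings() -> "SweepSettings":
    return SweepSettings(
        limits=SweepSettingsLimits(max_total_trials=20, max_concurrent_trials=4),
        search_space=[{"learning_rate": "uniform(0.001, 0.1)"}],
        sampling_algorithm="Random",
        early_termination=SweepEarlyTerminationPolicy(
            policy_type="Bandit",
            evaluation_interval=1,
            delay_evaluation=5,
            slack_factor=0.1,
        ),
    )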
class SystemData(msrest.serialization.Model):
"""SystemData.
:ivar created_at:
:vartype created_at: ~datetime.datetime
:ivar created_by:
:vartype created_by: str
:ivar created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:vartype created_by_type: str or ~flow.models.UserType
:ivar last_modified_at:
:vartype last_modified_at: ~datetime.datetime
:ivar last_modified_by:
:vartype last_modified_by: str
:ivar last_modified_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:vartype last_modified_by_type: str or ~flow.models.UserType
"""
_attribute_map = {
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
}
def __init__(
self,
*,
created_at: Optional[datetime.datetime] = None,
created_by: Optional[str] = None,
created_by_type: Optional[Union[str, "UserType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "UserType"]] = None,
**kwargs
):
"""
:keyword created_at:
:paramtype created_at: ~datetime.datetime
:keyword created_by:
:paramtype created_by: str
:keyword created_by_type: Possible values include: "User", "Application", "ManagedIdentity",
"Key".
:paramtype created_by_type: str or ~flow.models.UserType
:keyword last_modified_at:
:paramtype last_modified_at: ~datetime.datetime
:keyword last_modified_by:
:paramtype last_modified_by: str
:keyword last_modified_by_type: Possible values include: "User", "Application",
"ManagedIdentity", "Key".
:paramtype last_modified_by_type: str or ~flow.models.UserType
"""
super(SystemData, self).__init__(**kwargs)
self.created_at = created_at
self.created_by = created_by
self.created_by_type = created_by_type
self.last_modified_at = last_modified_at
self.last_modified_by = last_modified_by
self.last_modified_by_type = last_modified_by_type
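# Usage sketch: SystemData round-trips through msrest's base Model helpers;
# serialize() should emit the wire-format dict (camelCase keys, ISO-8601 dates)
# per _attribute_map. The timestamp and identity below are arbitrary, and the
# module-level ``datetime`` import is relied on.
def _example_system_data_wire() -> dict:
    data = SystemData(
        created_at=datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
        created_by="[email protected]",
        created_by_type="User",
    )
    return data.serialize()  # e.g. {"createdAt": "2023-01-01T00:00:00.000Z", ...}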
class SystemMeta(msrest.serialization.Model):
"""SystemMeta.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar extra_hash:
:vartype extra_hash: str
:ivar content_hash:
:vartype content_hash: str
:ivar identifier_hashes:
:vartype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
:ivar extra_hashes:
:vartype extra_hashes: ~flow.models.SystemMetaExtraHashes
"""
_attribute_map = {
'identifier_hash': {'key': 'identifierHash', 'type': 'str'},
'extra_hash': {'key': 'extraHash', 'type': 'str'},
'content_hash': {'key': 'contentHash', 'type': 'str'},
'identifier_hashes': {'key': 'identifierHashes', 'type': 'SystemMetaIdentifierHashes'},
'extra_hashes': {'key': 'extraHashes', 'type': 'SystemMetaExtraHashes'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
extra_hash: Optional[str] = None,
content_hash: Optional[str] = None,
identifier_hashes: Optional["SystemMetaIdentifierHashes"] = None,
extra_hashes: Optional["SystemMetaExtraHashes"] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword extra_hash:
:paramtype extra_hash: str
:keyword content_hash:
:paramtype content_hash: str
:keyword identifier_hashes:
:paramtype identifier_hashes: ~flow.models.SystemMetaIdentifierHashes
:keyword extra_hashes:
:paramtype extra_hashes: ~flow.models.SystemMetaExtraHashes
"""
super(SystemMeta, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.extra_hash = extra_hash
self.content_hash = content_hash
self.identifier_hashes = identifier_hashes
self.extra_hashes = extra_hashes
class SystemMetaExtraHashes(msrest.serialization.Model):
"""SystemMetaExtraHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(SystemMetaExtraHashes, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
class SystemMetaIdentifierHashes(msrest.serialization.Model):
"""SystemMetaIdentifierHashes.
:ivar identifier_hash:
:vartype identifier_hash: str
:ivar identifier_hash_v2:
:vartype identifier_hash_v2: str
"""
_attribute_map = {
'identifier_hash': {'key': 'IdentifierHash', 'type': 'str'},
'identifier_hash_v2': {'key': 'IdentifierHashV2', 'type': 'str'},
}
def __init__(
self,
*,
identifier_hash: Optional[str] = None,
identifier_hash_v2: Optional[str] = None,
**kwargs
):
"""
:keyword identifier_hash:
:paramtype identifier_hash: str
:keyword identifier_hash_v2:
:paramtype identifier_hash_v2: str
"""
super(SystemMetaIdentifierHashes, self).__init__(**kwargs)
self.identifier_hash = identifier_hash
self.identifier_hash_v2 = identifier_hash_v2
class TargetLags(msrest.serialization.Model):
"""TargetLags.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.TargetLagsMode
:ivar values:
:vartype values: list[int]
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'values': {'key': 'values', 'type': '[int]'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "TargetLagsMode"]] = None,
values: Optional[List[int]] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.TargetLagsMode
:keyword values:
:paramtype values: list[int]
"""
super(TargetLags, self).__init__(**kwargs)
self.mode = mode
self.values = values
class TargetRollingWindowSize(msrest.serialization.Model):
"""TargetRollingWindowSize.
:ivar mode: Possible values include: "Auto", "Custom".
:vartype mode: str or ~flow.models.TargetRollingWindowSizeMode
:ivar value:
:vartype value: int
"""
_attribute_map = {
'mode': {'key': 'mode', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'},
}
def __init__(
self,
*,
mode: Optional[Union[str, "TargetRollingWindowSizeMode"]] = None,
value: Optional[int] = None,
**kwargs
):
"""
:keyword mode: Possible values include: "Auto", "Custom".
:paramtype mode: str or ~flow.models.TargetRollingWindowSizeMode
:keyword value:
:paramtype value: int
"""
super(TargetRollingWindowSize, self).__init__(**kwargs)
self.mode = mode
self.value = value
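# Usage sketch: the two forecasting knobs above are typically set together.
# Reading "Custom" mode as "use values/value" and "Auto" as "ignore it" is an
# assumption based on the mode names, not a contract documented here.
def _example_forecasting_windows():
    lags = TargetLags(mode="Custom", values=[1, 2, 3])
    window = TargetRollingWindowSize(mode="Auto")
    return lags, window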
class TargetSelectorConfiguration(msrest.serialization.Model):
"""TargetSelectorConfiguration.
:ivar low_priority_vm_tolerant:
:vartype low_priority_vm_tolerant: bool
:ivar cluster_block_list:
:vartype cluster_block_list: list[str]
:ivar compute_type:
:vartype compute_type: str
:ivar instance_type:
:vartype instance_type: list[str]
:ivar instance_types:
:vartype instance_types: list[str]
:ivar my_resource_only:
:vartype my_resource_only: bool
:ivar plan_id:
:vartype plan_id: str
:ivar plan_region_id:
:vartype plan_region_id: str
:ivar region:
:vartype region: list[str]
:ivar regions:
:vartype regions: list[str]
:ivar vc_block_list:
:vartype vc_block_list: list[str]
"""
_attribute_map = {
'low_priority_vm_tolerant': {'key': 'lowPriorityVMTolerant', 'type': 'bool'},
'cluster_block_list': {'key': 'clusterBlockList', 'type': '[str]'},
'compute_type': {'key': 'computeType', 'type': 'str'},
'instance_type': {'key': 'instanceType', 'type': '[str]'},
'instance_types': {'key': 'instanceTypes', 'type': '[str]'},
'my_resource_only': {'key': 'myResourceOnly', 'type': 'bool'},
'plan_id': {'key': 'planId', 'type': 'str'},
'plan_region_id': {'key': 'planRegionId', 'type': 'str'},
'region': {'key': 'region', 'type': '[str]'},
'regions': {'key': 'regions', 'type': '[str]'},
'vc_block_list': {'key': 'vcBlockList', 'type': '[str]'},
}
def __init__(
self,
*,
low_priority_vm_tolerant: Optional[bool] = None,
cluster_block_list: Optional[List[str]] = None,
compute_type: Optional[str] = None,
instance_type: Optional[List[str]] = None,
instance_types: Optional[List[str]] = None,
my_resource_only: Optional[bool] = None,
plan_id: Optional[str] = None,
plan_region_id: Optional[str] = None,
region: Optional[List[str]] = None,
regions: Optional[List[str]] = None,
vc_block_list: Optional[List[str]] = None,
**kwargs
):
"""
:keyword low_priority_vm_tolerant:
:paramtype low_priority_vm_tolerant: bool
:keyword cluster_block_list:
:paramtype cluster_block_list: list[str]
:keyword compute_type:
:paramtype compute_type: str
:keyword instance_type:
:paramtype instance_type: list[str]
:keyword instance_types:
:paramtype instance_types: list[str]
:keyword my_resource_only:
:paramtype my_resource_only: bool
:keyword plan_id:
:paramtype plan_id: str
:keyword plan_region_id:
:paramtype plan_region_id: str
:keyword region:
:paramtype region: list[str]
:keyword regions:
:paramtype regions: list[str]
:keyword vc_block_list:
:paramtype vc_block_list: list[str]
"""
super(TargetSelectorConfiguration, self).__init__(**kwargs)
self.low_priority_vm_tolerant = low_priority_vm_tolerant
self.cluster_block_list = cluster_block_list
self.compute_type = compute_type
self.instance_type = instance_type
self.instance_types = instance_types
self.my_resource_only = my_resource_only
self.plan_id = plan_id
self.plan_region_id = plan_region_id
self.region = region
self.regions = regions
self.vc_block_list = vc_block_list
class Task(msrest.serialization.Model):
"""Task.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id:
:vartype id: int
:ivar exception: Anything.
:vartype exception: any
:ivar status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
"Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
:vartype status: str or ~flow.models.TaskStatus
:ivar is_canceled:
:vartype is_canceled: bool
:ivar is_completed:
:vartype is_completed: bool
:ivar is_completed_successfully:
:vartype is_completed_successfully: bool
:ivar creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
"AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
:vartype creation_options: str or ~flow.models.TaskCreationOptions
:ivar async_state: Anything.
:vartype async_state: any
:ivar is_faulted:
:vartype is_faulted: bool
"""
_validation = {
'id': {'readonly': True},
'exception': {'readonly': True},
'is_canceled': {'readonly': True},
'is_completed': {'readonly': True},
'is_completed_successfully': {'readonly': True},
'async_state': {'readonly': True},
'is_faulted': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'exception': {'key': 'exception', 'type': 'object'},
'status': {'key': 'status', 'type': 'str'},
'is_canceled': {'key': 'isCanceled', 'type': 'bool'},
'is_completed': {'key': 'isCompleted', 'type': 'bool'},
'is_completed_successfully': {'key': 'isCompletedSuccessfully', 'type': 'bool'},
'creation_options': {'key': 'creationOptions', 'type': 'str'},
'async_state': {'key': 'asyncState', 'type': 'object'},
'is_faulted': {'key': 'isFaulted', 'type': 'bool'},
}
def __init__(
self,
*,
status: Optional[Union[str, "TaskStatus"]] = None,
creation_options: Optional[Union[str, "TaskCreationOptions"]] = None,
**kwargs
):
"""
:keyword status: Possible values include: "Created", "WaitingForActivation", "WaitingToRun",
"Running", "WaitingForChildrenToComplete", "RanToCompletion", "Canceled", "Faulted".
:paramtype status: str or ~flow.models.TaskStatus
:keyword creation_options: Possible values include: "None", "PreferFairness", "LongRunning",
"AttachedToParent", "DenyChildAttach", "HideScheduler", "RunContinuationsAsynchronously".
:paramtype creation_options: str or ~flow.models.TaskCreationOptions
"""
super(Task, self).__init__(**kwargs)
self.id = None
self.exception = None
self.status = status
self.is_canceled = None
self.is_completed = None
self.is_completed_successfully = None
self.creation_options = creation_options
self.async_state = None
self.is_faulted = None
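# Usage sketch: every field marked readonly in _validation is forced to None by
# __init__ and is only populated when a server response is deserialized, so a
# locally built Task carries nothing but status and creation_options.
def _example_task_readonly() -> "Task":
    task = Task(status="Running", creation_options="LongRunning")
    assert task.id is None and task.is_completed is None  # server-owned fields
    return task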
class TaskControlFlowInfo(msrest.serialization.Model):
"""TaskControlFlowInfo.
:ivar control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:vartype control_flow_type: str or ~flow.models.ControlFlowType
:ivar iteration_index:
:vartype iteration_index: int
:ivar item_name:
:vartype item_name: str
:ivar parameters_overwritten: Dictionary of :code:`<string>`.
:vartype parameters_overwritten: dict[str, str]
:ivar is_reused:
:vartype is_reused: bool
"""
_attribute_map = {
'control_flow_type': {'key': 'controlFlowType', 'type': 'str'},
'iteration_index': {'key': 'iterationIndex', 'type': 'int'},
'item_name': {'key': 'itemName', 'type': 'str'},
'parameters_overwritten': {'key': 'parametersOverwritten', 'type': '{str}'},
'is_reused': {'key': 'isReused', 'type': 'bool'},
}
def __init__(
self,
*,
control_flow_type: Optional[Union[str, "ControlFlowType"]] = None,
iteration_index: Optional[int] = None,
item_name: Optional[str] = None,
parameters_overwritten: Optional[Dict[str, str]] = None,
is_reused: Optional[bool] = None,
**kwargs
):
"""
:keyword control_flow_type: Possible values include: "None", "DoWhile", "ParallelFor".
:paramtype control_flow_type: str or ~flow.models.ControlFlowType
:keyword iteration_index:
:paramtype iteration_index: int
:keyword item_name:
:paramtype item_name: str
:keyword parameters_overwritten: Dictionary of :code:`<string>`.
:paramtype parameters_overwritten: dict[str, str]
:keyword is_reused:
:paramtype is_reused: bool
"""
super(TaskControlFlowInfo, self).__init__(**kwargs)
self.control_flow_type = control_flow_type
self.iteration_index = iteration_index
self.item_name = item_name
self.parameters_overwritten = parameters_overwritten
self.is_reused = is_reused
class TaskReuseInfo(msrest.serialization.Model):
"""TaskReuseInfo.
:ivar experiment_id:
:vartype experiment_id: str
:ivar pipeline_run_id:
:vartype pipeline_run_id: str
:ivar node_id:
:vartype node_id: str
:ivar request_id:
:vartype request_id: str
:ivar run_id:
:vartype run_id: str
:ivar node_start_time:
:vartype node_start_time: ~datetime.datetime
:ivar node_end_time:
:vartype node_end_time: ~datetime.datetime
"""
_attribute_map = {
'experiment_id': {'key': 'experimentId', 'type': 'str'},
'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
'node_id': {'key': 'nodeId', 'type': 'str'},
'request_id': {'key': 'requestId', 'type': 'str'},
'run_id': {'key': 'runId', 'type': 'str'},
'node_start_time': {'key': 'nodeStartTime', 'type': 'iso-8601'},
'node_end_time': {'key': 'nodeEndTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
experiment_id: Optional[str] = None,
pipeline_run_id: Optional[str] = None,
node_id: Optional[str] = None,
request_id: Optional[str] = None,
run_id: Optional[str] = None,
node_start_time: Optional[datetime.datetime] = None,
node_end_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword experiment_id:
:paramtype experiment_id: str
:keyword pipeline_run_id:
:paramtype pipeline_run_id: str
:keyword node_id:
:paramtype node_id: str
:keyword request_id:
:paramtype request_id: str
:keyword run_id:
:paramtype run_id: str
:keyword node_start_time:
:paramtype node_start_time: ~datetime.datetime
:keyword node_end_time:
:paramtype node_end_time: ~datetime.datetime
"""
super(TaskReuseInfo, self).__init__(**kwargs)
self.experiment_id = experiment_id
self.pipeline_run_id = pipeline_run_id
self.node_id = node_id
self.request_id = request_id
self.run_id = run_id
self.node_start_time = node_start_time
self.node_end_time = node_end_time
class TensorflowConfiguration(msrest.serialization.Model):
"""TensorflowConfiguration.
:ivar worker_count:
:vartype worker_count: int
:ivar parameter_server_count:
:vartype parameter_server_count: int
"""
_attribute_map = {
'worker_count': {'key': 'workerCount', 'type': 'int'},
'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
}
def __init__(
self,
*,
worker_count: Optional[int] = None,
parameter_server_count: Optional[int] = None,
**kwargs
):
"""
:keyword worker_count:
:paramtype worker_count: int
:keyword parameter_server_count:
:paramtype parameter_server_count: int
"""
super(TensorflowConfiguration, self).__init__(**kwargs)
self.worker_count = worker_count
self.parameter_server_count = parameter_server_count
class TestDataSettings(msrest.serialization.Model):
"""TestDataSettings.
:ivar test_data_size:
:vartype test_data_size: float
"""
_attribute_map = {
'test_data_size': {'key': 'testDataSize', 'type': 'float'},
}
def __init__(
self,
*,
test_data_size: Optional[float] = None,
**kwargs
):
"""
:keyword test_data_size:
:paramtype test_data_size: float
"""
super(TestDataSettings, self).__init__(**kwargs)
self.test_data_size = test_data_size
class Tool(msrest.serialization.Model):
"""Tool.
:ivar name:
:vartype name: str
:ivar type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:vartype type: str or ~flow.models.ToolType
:ivar inputs: This is a dictionary.
:vartype inputs: dict[str, ~flow.models.InputDefinition]
:ivar outputs: This is a dictionary.
:vartype outputs: dict[str, ~flow.models.OutputDefinition]
:ivar description:
:vartype description: str
:ivar connection_type:
:vartype connection_type: list[str or ~flow.models.ConnectionType]
:ivar module:
:vartype module: str
:ivar class_name:
:vartype class_name: str
:ivar source:
:vartype source: str
:ivar lkg_code:
:vartype lkg_code: str
:ivar code:
:vartype code: str
:ivar function:
:vartype function: str
:ivar action_type:
:vartype action_type: str
:ivar provider_config: This is a dictionary.
:vartype provider_config: dict[str, ~flow.models.InputDefinition]
:ivar function_config: This is a dictionary.
:vartype function_config: dict[str, ~flow.models.InputDefinition]
:ivar icon: Anything.
:vartype icon: any
:ivar category:
:vartype category: str
:ivar tags: A set of tags. This is a dictionary.
:vartype tags: dict[str, any]
:ivar is_builtin:
:vartype is_builtin: bool
:ivar package:
:vartype package: str
:ivar package_version:
:vartype package_version: str
:ivar default_prompt:
:vartype default_prompt: str
:ivar enable_kwargs:
:vartype enable_kwargs: bool
:ivar deprecated_tools:
:vartype deprecated_tools: list[str]
:ivar tool_state: Possible values include: "Stable", "Preview", "Deprecated".
:vartype tool_state: str or ~flow.models.ToolState
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'inputs': {'key': 'inputs', 'type': '{InputDefinition}'},
'outputs': {'key': 'outputs', 'type': '{OutputDefinition}'},
'description': {'key': 'description', 'type': 'str'},
'connection_type': {'key': 'connection_type', 'type': '[str]'},
'module': {'key': 'module', 'type': 'str'},
'class_name': {'key': 'class_name', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'lkg_code': {'key': 'lkgCode', 'type': 'str'},
'code': {'key': 'code', 'type': 'str'},
'function': {'key': 'function', 'type': 'str'},
'action_type': {'key': 'action_type', 'type': 'str'},
'provider_config': {'key': 'provider_config', 'type': '{InputDefinition}'},
'function_config': {'key': 'function_config', 'type': '{InputDefinition}'},
'icon': {'key': 'icon', 'type': 'object'},
'category': {'key': 'category', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{object}'},
'is_builtin': {'key': 'is_builtin', 'type': 'bool'},
'package': {'key': 'package', 'type': 'str'},
'package_version': {'key': 'package_version', 'type': 'str'},
'default_prompt': {'key': 'default_prompt', 'type': 'str'},
'enable_kwargs': {'key': 'enable_kwargs', 'type': 'bool'},
'deprecated_tools': {'key': 'deprecated_tools', 'type': '[str]'},
'tool_state': {'key': 'tool_state', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
type: Optional[Union[str, "ToolType"]] = None,
inputs: Optional[Dict[str, "InputDefinition"]] = None,
outputs: Optional[Dict[str, "OutputDefinition"]] = None,
description: Optional[str] = None,
connection_type: Optional[List[Union[str, "ConnectionType"]]] = None,
module: Optional[str] = None,
class_name: Optional[str] = None,
source: Optional[str] = None,
lkg_code: Optional[str] = None,
code: Optional[str] = None,
function: Optional[str] = None,
action_type: Optional[str] = None,
provider_config: Optional[Dict[str, "InputDefinition"]] = None,
function_config: Optional[Dict[str, "InputDefinition"]] = None,
icon: Optional[Any] = None,
category: Optional[str] = None,
tags: Optional[Dict[str, Any]] = None,
is_builtin: Optional[bool] = None,
package: Optional[str] = None,
package_version: Optional[str] = None,
default_prompt: Optional[str] = None,
enable_kwargs: Optional[bool] = None,
deprecated_tools: Optional[List[str]] = None,
tool_state: Optional[Union[str, "ToolState"]] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword type: Possible values include: "llm", "python", "action", "prompt", "custom_llm",
"csharp".
:paramtype type: str or ~flow.models.ToolType
:keyword inputs: This is a dictionary.
:paramtype inputs: dict[str, ~flow.models.InputDefinition]
:keyword outputs: This is a dictionary.
:paramtype outputs: dict[str, ~flow.models.OutputDefinition]
:keyword description:
:paramtype description: str
:keyword connection_type:
:paramtype connection_type: list[str or ~flow.models.ConnectionType]
:keyword module:
:paramtype module: str
:keyword class_name:
:paramtype class_name: str
:keyword source:
:paramtype source: str
:keyword lkg_code:
:paramtype lkg_code: str
:keyword code:
:paramtype code: str
:keyword function:
:paramtype function: str
:keyword action_type:
:paramtype action_type: str
:keyword provider_config: This is a dictionary.
:paramtype provider_config: dict[str, ~flow.models.InputDefinition]
:keyword function_config: This is a dictionary.
:paramtype function_config: dict[str, ~flow.models.InputDefinition]
:keyword icon: Anything.
:paramtype icon: any
:keyword category:
:paramtype category: str
:keyword tags: A set of tags. This is a dictionary.
:paramtype tags: dict[str, any]
:keyword is_builtin:
:paramtype is_builtin: bool
:keyword package:
:paramtype package: str
:keyword package_version:
:paramtype package_version: str
:keyword default_prompt:
:paramtype default_prompt: str
:keyword enable_kwargs:
:paramtype enable_kwargs: bool
:keyword deprecated_tools:
:paramtype deprecated_tools: list[str]
:keyword tool_state: Possible values include: "Stable", "Preview", "Deprecated".
:paramtype tool_state: str or ~flow.models.ToolState
"""
super(Tool, self).__init__(**kwargs)
self.name = name
self.type = type
self.inputs = inputs
self.outputs = outputs
self.description = description
self.connection_type = connection_type
self.module = module
self.class_name = class_name
self.source = source
self.lkg_code = lkg_code
self.code = code
self.function = function
self.action_type = action_type
self.provider_config = provider_config
self.function_config = function_config
self.icon = icon
self.category = category
self.tags = tags
self.is_builtin = is_builtin
self.package = package
self.package_version = package_version
self.default_prompt = default_prompt
self.enable_kwargs = enable_kwargs
self.deprecated_tools = deprecated_tools
self.tool_state = tool_state
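# Usage sketch: a minimal python-type Tool. Only a few of the many optional
# fields are set, and the module/function/package names are invented for
# illustration.
def _example_tool() -> "Tool":
    return Tool(
        name="echo_tool",
        type="python",
        description="Echo the input text.",
        module="my_package.tools",
        function="echo",
        is_builtin=False,
        package="my-package",
        package_version="0.1.0",
        tool_state="Preview",
    )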
class ToolFuncResponse(msrest.serialization.Model):
"""ToolFuncResponse.
:ivar result: Anything.
:vartype result: any
:ivar logs: This is a dictionary.
:vartype logs: dict[str, str]
"""
_attribute_map = {
'result': {'key': 'result', 'type': 'object'},
'logs': {'key': 'logs', 'type': '{str}'},
}
def __init__(
self,
*,
result: Optional[Any] = None,
logs: Optional[Dict[str, str]] = None,
**kwargs
):
"""
:keyword result: Anything.
:paramtype result: any
:keyword logs: This is a dictionary.
:paramtype logs: dict[str, str]
"""
super(ToolFuncResponse, self).__init__(**kwargs)
self.result = result
self.logs = logs
class ToolInputDynamicList(msrest.serialization.Model):
"""ToolInputDynamicList.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs:
:vartype func_kwargs: list[dict[str, any]]
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
}
def __init__(
self,
*,
func_path: Optional[str] = None,
func_kwargs: Optional[List[Dict[str, Any]]] = None,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs:
:paramtype func_kwargs: list[dict[str, any]]
"""
super(ToolInputDynamicList, self).__init__(**kwargs)
self.func_path = func_path
self.func_kwargs = func_kwargs
class ToolInputGeneratedBy(msrest.serialization.Model):
"""ToolInputGeneratedBy.
:ivar func_path:
:vartype func_path: str
:ivar func_kwargs:
:vartype func_kwargs: list[dict[str, any]]
:ivar reverse_func_path:
:vartype reverse_func_path: str
"""
_attribute_map = {
'func_path': {'key': 'func_path', 'type': 'str'},
'func_kwargs': {'key': 'func_kwargs', 'type': '[{object}]'},
'reverse_func_path': {'key': 'reverse_func_path', 'type': 'str'},
}
def __init__(
self,
*,
func_path: Optional[str] = None,
func_kwargs: Optional[List[Dict[str, Any]]] = None,
reverse_func_path: Optional[str] = None,
**kwargs
):
"""
:keyword func_path:
:paramtype func_path: str
:keyword func_kwargs:
:paramtype func_kwargs: list[dict[str, any]]
:keyword reverse_func_path:
:paramtype reverse_func_path: str
"""
super(ToolInputGeneratedBy, self).__init__(**kwargs)
self.func_path = func_path
self.func_kwargs = func_kwargs
self.reverse_func_path = reverse_func_path
class ToolMetaDto(msrest.serialization.Model):
"""ToolMetaDto.
:ivar tools: This is a dictionary.
:vartype tools: dict[str, ~flow.models.Tool]
:ivar errors: This is a dictionary.
:vartype errors: dict[str, ~flow.models.ErrorResponse]
"""
_attribute_map = {
'tools': {'key': 'tools', 'type': '{Tool}'},
'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
}
def __init__(
self,
*,
tools: Optional[Dict[str, "Tool"]] = None,
errors: Optional[Dict[str, "ErrorResponse"]] = None,
**kwargs
):
"""
:keyword tools: This is a dictionary.
:paramtype tools: dict[str, ~flow.models.Tool]
:keyword errors: This is a dictionary.
:paramtype errors: dict[str, ~flow.models.ErrorResponse]
"""
super(ToolMetaDto, self).__init__(**kwargs)
self.tools = tools
self.errors = errors
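# Usage sketch: ToolMetaDto pairs successfully parsed tools with per-source
# errors keyed the same way, so a consumer can report both sides of a tool
# scan. The "echo.py" key is a hypothetical source identifier.
def _example_tool_meta() -> "ToolMetaDto":
    parsed = {"echo.py": Tool(name="echo", type="python", function="echo")}
    return ToolMetaDto(tools=parsed, errors={})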
class ToolSetting(msrest.serialization.Model):
"""ToolSetting.
:ivar providers:
:vartype providers: list[~flow.models.ProviderEntity]
"""
_attribute_map = {
'providers': {'key': 'providers', 'type': '[ProviderEntity]'},
}
def __init__(
self,
*,
providers: Optional[List["ProviderEntity"]] = None,
**kwargs
):
"""
:keyword providers:
:paramtype providers: list[~flow.models.ProviderEntity]
"""
super(ToolSetting, self).__init__(**kwargs)
self.providers = providers
class ToolSourceMeta(msrest.serialization.Model):
"""ToolSourceMeta.
:ivar tool_type:
:vartype tool_type: str
"""
_attribute_map = {
'tool_type': {'key': 'tool_type', 'type': 'str'},
}
def __init__(
self,
*,
tool_type: Optional[str] = None,
**kwargs
):
"""
:keyword tool_type:
:paramtype tool_type: str
"""
super(ToolSourceMeta, self).__init__(**kwargs)
self.tool_type = tool_type
class TorchDistributedConfiguration(msrest.serialization.Model):
"""TorchDistributedConfiguration.
:ivar process_count_per_node:
:vartype process_count_per_node: int
"""
_attribute_map = {
'process_count_per_node': {'key': 'processCountPerNode', 'type': 'int'},
}
def __init__(
self,
*,
process_count_per_node: Optional[int] = None,
**kwargs
):
"""
:keyword process_count_per_node:
:paramtype process_count_per_node: int
"""
super(TorchDistributedConfiguration, self).__init__(**kwargs)
self.process_count_per_node = process_count_per_node
class TrainingDiagnosticConfiguration(msrest.serialization.Model):
"""TrainingDiagnosticConfiguration.
:ivar job_heart_beat_timeout_seconds:
:vartype job_heart_beat_timeout_seconds: int
"""
_attribute_map = {
'job_heart_beat_timeout_seconds': {'key': 'jobHeartBeatTimeoutSeconds', 'type': 'int'},
}
def __init__(
self,
*,
job_heart_beat_timeout_seconds: Optional[int] = None,
**kwargs
):
"""
:keyword job_heart_beat_timeout_seconds:
:paramtype job_heart_beat_timeout_seconds: int
"""
super(TrainingDiagnosticConfiguration, self).__init__(**kwargs)
self.job_heart_beat_timeout_seconds = job_heart_beat_timeout_seconds
class TrainingOutput(msrest.serialization.Model):
"""TrainingOutput.
:ivar training_output_type: Possible values include: "Metrics", "Model".
:vartype training_output_type: str or ~flow.models.TrainingOutputType
:ivar iteration:
:vartype iteration: int
:ivar metric:
:vartype metric: str
:ivar model_file:
:vartype model_file: str
"""
_attribute_map = {
'training_output_type': {'key': 'trainingOutputType', 'type': 'str'},
'iteration': {'key': 'iteration', 'type': 'int'},
'metric': {'key': 'metric', 'type': 'str'},
'model_file': {'key': 'modelFile', 'type': 'str'},
}
def __init__(
self,
*,
training_output_type: Optional[Union[str, "TrainingOutputType"]] = None,
iteration: Optional[int] = None,
metric: Optional[str] = None,
model_file: Optional[str] = None,
**kwargs
):
"""
:keyword training_output_type: Possible values include: "Metrics", "Model".
:paramtype training_output_type: str or ~flow.models.TrainingOutputType
:keyword iteration:
:paramtype iteration: int
:keyword metric:
:paramtype metric: str
:keyword model_file:
:paramtype model_file: str
"""
super(TrainingOutput, self).__init__(**kwargs)
self.training_output_type = training_output_type
self.iteration = iteration
self.metric = metric
self.model_file = model_file
class TrainingSettings(msrest.serialization.Model):
"""TrainingSettings.
:ivar block_list_models:
:vartype block_list_models: list[str]
:ivar allow_list_models:
:vartype allow_list_models: list[str]
:ivar enable_dnn_training:
:vartype enable_dnn_training: bool
:ivar enable_onnx_compatible_models:
:vartype enable_onnx_compatible_models: bool
:ivar stack_ensemble_settings:
:vartype stack_ensemble_settings: ~flow.models.StackEnsembleSettings
:ivar enable_stack_ensemble:
:vartype enable_stack_ensemble: bool
:ivar enable_vote_ensemble:
:vartype enable_vote_ensemble: bool
:ivar ensemble_model_download_timeout:
:vartype ensemble_model_download_timeout: str
:ivar enable_model_explainability:
:vartype enable_model_explainability: bool
:ivar training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:vartype training_mode: str or ~flow.models.TabularTrainingMode
"""
_attribute_map = {
'block_list_models': {'key': 'blockListModels', 'type': '[str]'},
'allow_list_models': {'key': 'allowListModels', 'type': '[str]'},
'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'},
'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'},
'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'},
'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'},
'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'},
'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'str'},
'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'},
'training_mode': {'key': 'trainingMode', 'type': 'str'},
}
def __init__(
self,
*,
block_list_models: Optional[List[str]] = None,
allow_list_models: Optional[List[str]] = None,
enable_dnn_training: Optional[bool] = None,
enable_onnx_compatible_models: Optional[bool] = None,
stack_ensemble_settings: Optional["StackEnsembleSettings"] = None,
enable_stack_ensemble: Optional[bool] = None,
enable_vote_ensemble: Optional[bool] = None,
ensemble_model_download_timeout: Optional[str] = None,
enable_model_explainability: Optional[bool] = None,
training_mode: Optional[Union[str, "TabularTrainingMode"]] = None,
**kwargs
):
"""
:keyword block_list_models:
:paramtype block_list_models: list[str]
:keyword allow_list_models:
:paramtype allow_list_models: list[str]
:keyword enable_dnn_training:
:paramtype enable_dnn_training: bool
:keyword enable_onnx_compatible_models:
:paramtype enable_onnx_compatible_models: bool
:keyword stack_ensemble_settings:
:paramtype stack_ensemble_settings: ~flow.models.StackEnsembleSettings
:keyword enable_stack_ensemble:
:paramtype enable_stack_ensemble: bool
:keyword enable_vote_ensemble:
:paramtype enable_vote_ensemble: bool
:keyword ensemble_model_download_timeout:
:paramtype ensemble_model_download_timeout: str
:keyword enable_model_explainability:
:paramtype enable_model_explainability: bool
:keyword training_mode: Possible values include: "Distributed", "NonDistributed", "Auto".
:paramtype training_mode: str or ~flow.models.TabularTrainingMode
"""
super(TrainingSettings, self).__init__(**kwargs)
self.block_list_models = block_list_models
self.allow_list_models = allow_list_models
self.enable_dnn_training = enable_dnn_training
self.enable_onnx_compatible_models = enable_onnx_compatible_models
self.stack_ensemble_settings = stack_ensemble_settings
self.enable_stack_ensemble = enable_stack_ensemble
self.enable_vote_ensemble = enable_vote_ensemble
self.ensemble_model_download_timeout = ensemble_model_download_timeout
self.enable_model_explainability = enable_model_explainability
self.training_mode = training_mode
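# Usage sketch: an AutoML-style training settings block. Reading the timeout as
# an ISO-8601 duration string is an assumption inferred from the field's 'str'
# wire type; the model name in the block list is a placeholder.
def _example_training_settings() -> "TrainingSettings":
    return TrainingSettings(
        block_list_models=["XGBoostClassifier"],
        enable_onnx_compatible_models=True,
        enable_stack_ensemble=True,
        enable_vote_ensemble=True,
        ensemble_model_download_timeout="PT5M",
        training_mode="Auto",
    )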
class TriggerAsyncOperationStatus(msrest.serialization.Model):
"""TriggerAsyncOperationStatus.
:ivar id:
:vartype id: str
:ivar operation_type: Possible values include: "Create", "Update", "Delete", "CreateOrUpdate".
:vartype operation_type: str or ~flow.models.TriggerOperationType
:ivar provisioning_status: Possible values include: "Creating", "Updating", "Deleting",
"Succeeded", "Failed", "Canceled".
:vartype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus
:ivar created_time:
:vartype created_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
:ivar error: The error response.
:vartype error: ~flow.models.ErrorResponse
:ivar status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
"EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
"ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed",
"MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther",
"RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb",
"PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound",
"MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout",
"Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge",
"RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable",
"ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency",
"UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
"UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
"ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates",
"InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired".
:vartype status_code: str or ~flow.models.HttpStatusCode
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'operation_type': {'key': 'operationType', 'type': 'str'},
'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'error': {'key': 'error', 'type': 'ErrorResponse'},
'status_code': {'key': 'statusCode', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
operation_type: Optional[Union[str, "TriggerOperationType"]] = None,
provisioning_status: Optional[Union[str, "ScheduleProvisioningStatus"]] = None,
created_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
error: Optional["ErrorResponse"] = None,
status_code: Optional[Union[str, "HttpStatusCode"]] = None,
**kwargs
):
"""
:keyword id:
:paramtype id: str
:keyword operation_type: Possible values include: "Create", "Update", "Delete",
"CreateOrUpdate".
:paramtype operation_type: str or ~flow.models.TriggerOperationType
:keyword provisioning_status: Possible values include: "Creating", "Updating", "Deleting",
"Succeeded", "Failed", "Canceled".
:paramtype provisioning_status: str or ~flow.models.ScheduleProvisioningStatus
:keyword created_time:
:paramtype created_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
:keyword error: The error response.
:paramtype error: ~flow.models.ErrorResponse
:keyword status_code: Possible values include: "Continue", "SwitchingProtocols", "Processing",
"EarlyHints", "OK", "Created", "Accepted", "NonAuthoritativeInformation", "NoContent",
"ResetContent", "PartialContent", "MultiStatus", "AlreadyReported", "IMUsed",
"MultipleChoices", "Ambiguous", "MovedPermanently", "Moved", "Found", "Redirect", "SeeOther",
"RedirectMethod", "NotModified", "UseProxy", "Unused", "TemporaryRedirect", "RedirectKeepVerb",
"PermanentRedirect", "BadRequest", "Unauthorized", "PaymentRequired", "Forbidden", "NotFound",
"MethodNotAllowed", "NotAcceptable", "ProxyAuthenticationRequired", "RequestTimeout",
"Conflict", "Gone", "LengthRequired", "PreconditionFailed", "RequestEntityTooLarge",
"RequestUriTooLong", "UnsupportedMediaType", "RequestedRangeNotSatisfiable",
"ExpectationFailed", "MisdirectedRequest", "UnprocessableEntity", "Locked", "FailedDependency",
"UpgradeRequired", "PreconditionRequired", "TooManyRequests", "RequestHeaderFieldsTooLarge",
"UnavailableForLegalReasons", "InternalServerError", "NotImplemented", "BadGateway",
"ServiceUnavailable", "GatewayTimeout", "HttpVersionNotSupported", "VariantAlsoNegotiates",
"InsufficientStorage", "LoopDetected", "NotExtended", "NetworkAuthenticationRequired".
:paramtype status_code: str or ~flow.models.HttpStatusCode
"""
super(TriggerAsyncOperationStatus, self).__init__(**kwargs)
self.id = id
self.operation_type = operation_type
self.provisioning_status = provisioning_status
self.created_time = created_time
self.end_time = end_time
self.error = error
self.status_code = status_code
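# Usage sketch: a polling predicate over the status model above. Treating
# "Succeeded"/"Failed"/"Canceled" as terminal follows the enum naming, not a
# documented contract.
def _is_trigger_operation_done(status: "TriggerAsyncOperationStatus") -> bool:
    return status.provisioning_status in ("Succeeded", "Failed", "Canceled")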
class TuningNodeSetting(msrest.serialization.Model):
"""TuningNodeSetting.
:ivar variant_ids:
:vartype variant_ids: list[str]
"""
_attribute_map = {
'variant_ids': {'key': 'variantIds', 'type': '[str]'},
}
def __init__(
self,
*,
variant_ids: Optional[List[str]] = None,
**kwargs
):
"""
:keyword variant_ids:
:paramtype variant_ids: list[str]
"""
super(TuningNodeSetting, self).__init__(**kwargs)
self.variant_ids = variant_ids
class TypedAssetReference(msrest.serialization.Model):
"""TypedAssetReference.
:ivar asset_id:
:vartype asset_id: str
:ivar type:
:vartype type: str
"""
_attribute_map = {
'asset_id': {'key': 'assetId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
asset_id: Optional[str] = None,
type: Optional[str] = None,
**kwargs
):
"""
:keyword asset_id:
:paramtype asset_id: str
:keyword type:
:paramtype type: str
"""
super(TypedAssetReference, self).__init__(**kwargs)
self.asset_id = asset_id
self.type = type
class UIAzureOpenAIDeploymentNameSelector(msrest.serialization.Model):
"""UIAzureOpenAIDeploymentNameSelector.
:ivar capabilities:
:vartype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
"""
_attribute_map = {
'capabilities': {'key': 'Capabilities', 'type': 'UIAzureOpenAIModelCapabilities'},
}
def __init__(
self,
*,
capabilities: Optional["UIAzureOpenAIModelCapabilities"] = None,
**kwargs
):
"""
:keyword capabilities:
:paramtype capabilities: ~flow.models.UIAzureOpenAIModelCapabilities
"""
super(UIAzureOpenAIDeploymentNameSelector, self).__init__(**kwargs)
self.capabilities = capabilities
class UIAzureOpenAIModelCapabilities(msrest.serialization.Model):
"""UIAzureOpenAIModelCapabilities.
:ivar completion:
:vartype completion: bool
:ivar chat_completion:
:vartype chat_completion: bool
:ivar embeddings:
:vartype embeddings: bool
"""
_attribute_map = {
'completion': {'key': 'Completion', 'type': 'bool'},
'chat_completion': {'key': 'ChatCompletion', 'type': 'bool'},
'embeddings': {'key': 'Embeddings', 'type': 'bool'},
}
def __init__(
self,
*,
completion: Optional[bool] = None,
chat_completion: Optional[bool] = None,
embeddings: Optional[bool] = None,
**kwargs
):
"""
:keyword completion:
:paramtype completion: bool
:keyword chat_completion:
:paramtype chat_completion: bool
:keyword embeddings:
:paramtype embeddings: bool
"""
super(UIAzureOpenAIModelCapabilities, self).__init__(**kwargs)
self.completion = completion
self.chat_completion = chat_completion
self.embeddings = embeddings
class UIColumnPicker(msrest.serialization.Model):
"""UIColumnPicker.
:ivar column_picker_for:
:vartype column_picker_for: str
:ivar column_selection_categories:
:vartype column_selection_categories: list[str]
:ivar single_column_selection:
:vartype single_column_selection: bool
"""
_attribute_map = {
'column_picker_for': {'key': 'columnPickerFor', 'type': 'str'},
'column_selection_categories': {'key': 'columnSelectionCategories', 'type': '[str]'},
'single_column_selection': {'key': 'singleColumnSelection', 'type': 'bool'},
}
def __init__(
self,
*,
column_picker_for: Optional[str] = None,
column_selection_categories: Optional[List[str]] = None,
single_column_selection: Optional[bool] = None,
**kwargs
):
"""
:keyword column_picker_for:
:paramtype column_picker_for: str
:keyword column_selection_categories:
:paramtype column_selection_categories: list[str]
:keyword single_column_selection:
:paramtype single_column_selection: bool
"""
super(UIColumnPicker, self).__init__(**kwargs)
self.column_picker_for = column_picker_for
self.column_selection_categories = column_selection_categories
self.single_column_selection = single_column_selection
class UIComputeSelection(msrest.serialization.Model):
"""UIComputeSelection.
:ivar compute_types:
:vartype compute_types: list[str]
:ivar require_gpu:
:vartype require_gpu: bool
:ivar os_types:
:vartype os_types: list[str]
:ivar support_serverless:
:vartype support_serverless: bool
:ivar compute_run_settings_mapping: Dictionary mapping a compute type to the run setting
 parameters it exposes.
:vartype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
"""
_attribute_map = {
'compute_types': {'key': 'computeTypes', 'type': '[str]'},
'require_gpu': {'key': 'requireGpu', 'type': 'bool'},
'os_types': {'key': 'osTypes', 'type': '[str]'},
'support_serverless': {'key': 'supportServerless', 'type': 'bool'},
'compute_run_settings_mapping': {'key': 'computeRunSettingsMapping', 'type': '{[RunSettingParameter]}'},
}
def __init__(
self,
*,
compute_types: Optional[List[str]] = None,
require_gpu: Optional[bool] = None,
os_types: Optional[List[str]] = None,
support_serverless: Optional[bool] = None,
compute_run_settings_mapping: Optional[Dict[str, List["RunSettingParameter"]]] = None,
**kwargs
):
"""
:keyword compute_types:
:paramtype compute_types: list[str]
:keyword require_gpu:
:paramtype require_gpu: bool
:keyword os_types:
:paramtype os_types: list[str]
:keyword support_serverless:
:paramtype support_serverless: bool
:keyword compute_run_settings_mapping: Dictionary mapping a compute type to the run setting
 parameters it exposes.
:paramtype compute_run_settings_mapping: dict[str, list[~flow.models.RunSettingParameter]]
"""
super(UIComputeSelection, self).__init__(**kwargs)
self.compute_types = compute_types
self.require_gpu = require_gpu
self.os_types = os_types
self.support_serverless = support_serverless
self.compute_run_settings_mapping = compute_run_settings_mapping
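# Usage sketch: compute_run_settings_mapping keys a compute type string to its
# RunSettingParameter list; an empty list is a valid "no extra settings" entry.
# The compute and OS type strings are illustrative.
def _example_compute_selection() -> "UIComputeSelection":
    return UIComputeSelection(
        compute_types=["AmlCompute", "Kubernetes"],
        require_gpu=False,
        os_types=["Linux"],
        compute_run_settings_mapping={"AmlCompute": []},
    )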
class UIHyperparameterConfiguration(msrest.serialization.Model):
"""UIHyperparameterConfiguration.
:ivar model_name_to_hyper_parameter_and_distribution_mapping: Dictionary mapping a model name
 to its hyperparameter-name-to-distribution mapping.
:vartype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
 list[str]]]
:ivar distribution_parameters_mapping: Dictionary mapping a distribution name to its list of
 distribution parameters.
:vartype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]]
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'model_name_to_hyper_parameter_and_distribution_mapping': {'key': 'modelNameToHyperParameterAndDistributionMapping', 'type': '{{[str]}}'},
'distribution_parameters_mapping': {'key': 'distributionParametersMapping', 'type': '{[DistributionParameter]}'},
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
*,
model_name_to_hyper_parameter_and_distribution_mapping: Optional[Dict[str, Dict[str, List[str]]]] = None,
distribution_parameters_mapping: Optional[Dict[str, List["DistributionParameter"]]] = None,
json_schema: Optional[str] = None,
**kwargs
):
"""
:keyword model_name_to_hyper_parameter_and_distribution_mapping: Dictionary mapping a model
 name to its hyperparameter-name-to-distribution mapping.
:paramtype model_name_to_hyper_parameter_and_distribution_mapping: dict[str, dict[str,
 list[str]]]
:keyword distribution_parameters_mapping: Dictionary mapping a distribution name to its list
 of distribution parameters.
:paramtype distribution_parameters_mapping: dict[str, list[~flow.models.DistributionParameter]]
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIHyperparameterConfiguration, self).__init__(**kwargs)
self.model_name_to_hyper_parameter_and_distribution_mapping = model_name_to_hyper_parameter_and_distribution_mapping
self.distribution_parameters_mapping = distribution_parameters_mapping
self.json_schema = json_schema
class UIInputSetting(msrest.serialization.Model):
"""UIInputSetting.
:ivar name:
:vartype name: str
:ivar data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
"Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
:vartype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
:ivar path_on_compute:
:vartype path_on_compute: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'data_delivery_mode': {'key': 'dataDeliveryMode', 'type': 'str'},
'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
data_delivery_mode: Optional[Union[str, "UIInputDataDeliveryMode"]] = None,
path_on_compute: Optional[str] = None,
**kwargs
):
"""
:keyword name:
:paramtype name: str
:keyword data_delivery_mode: Possible values include: "Read-only mount", "Read-write mount",
"Download", "Direct", "Evaluate mount", "Evaluate download", "Hdfs".
:paramtype data_delivery_mode: str or ~flow.models.UIInputDataDeliveryMode
:keyword path_on_compute:
:paramtype path_on_compute: str
"""
super(UIInputSetting, self).__init__(**kwargs)
self.name = name
self.data_delivery_mode = data_delivery_mode
self.path_on_compute = path_on_compute
class UIJsonEditor(msrest.serialization.Model):
"""UIJsonEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
*,
json_schema: Optional[str] = None,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIJsonEditor, self).__init__(**kwargs)
self.json_schema = json_schema
class UIParameterHint(msrest.serialization.Model):
"""UIParameterHint.
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
:ivar column_picker:
:vartype column_picker: ~flow.models.UIColumnPicker
:ivar ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:vartype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
:ivar json_editor:
:vartype json_editor: ~flow.models.UIJsonEditor
:ivar prompt_flow_connection_selector:
:vartype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
:ivar azure_open_ai_deployment_name_selector:
:vartype azure_open_ai_deployment_name_selector:
~flow.models.UIAzureOpenAIDeploymentNameSelector
:ivar ux_ignore:
:vartype ux_ignore: bool
:ivar anonymous:
:vartype anonymous: bool
"""
_attribute_map = {
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
'column_picker': {'key': 'columnPicker', 'type': 'UIColumnPicker'},
'ui_script_language': {'key': 'uiScriptLanguage', 'type': 'str'},
'json_editor': {'key': 'jsonEditor', 'type': 'UIJsonEditor'},
'prompt_flow_connection_selector': {'key': 'PromptFlowConnectionSelector', 'type': 'UIPromptFlowConnectionSelector'},
'azure_open_ai_deployment_name_selector': {'key': 'AzureOpenAIDeploymentNameSelector', 'type': 'UIAzureOpenAIDeploymentNameSelector'},
'ux_ignore': {'key': 'UxIgnore', 'type': 'bool'},
'anonymous': {'key': 'Anonymous', 'type': 'bool'},
}
def __init__(
self,
*,
ui_widget_type: Optional[Union[str, "UIWidgetTypeEnum"]] = None,
column_picker: Optional["UIColumnPicker"] = None,
ui_script_language: Optional[Union[str, "UIScriptLanguageEnum"]] = None,
json_editor: Optional["UIJsonEditor"] = None,
prompt_flow_connection_selector: Optional["UIPromptFlowConnectionSelector"] = None,
azure_open_ai_deployment_name_selector: Optional["UIAzureOpenAIDeploymentNameSelector"] = None,
ux_ignore: Optional[bool] = None,
anonymous: Optional[bool] = None,
**kwargs
):
"""
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
:keyword column_picker:
:paramtype column_picker: ~flow.models.UIColumnPicker
:keyword ui_script_language: Possible values include: "None", "Python", "R", "Json", "Sql".
:paramtype ui_script_language: str or ~flow.models.UIScriptLanguageEnum
:keyword json_editor:
:paramtype json_editor: ~flow.models.UIJsonEditor
:keyword prompt_flow_connection_selector:
:paramtype prompt_flow_connection_selector: ~flow.models.UIPromptFlowConnectionSelector
:keyword azure_open_ai_deployment_name_selector:
:paramtype azure_open_ai_deployment_name_selector:
~flow.models.UIAzureOpenAIDeploymentNameSelector
:keyword ux_ignore:
:paramtype ux_ignore: bool
:keyword anonymous:
:paramtype anonymous: bool
"""
super(UIParameterHint, self).__init__(**kwargs)
self.ui_widget_type = ui_widget_type
self.column_picker = column_picker
self.ui_script_language = ui_script_language
self.json_editor = json_editor
self.prompt_flow_connection_selector = prompt_flow_connection_selector
self.azure_open_ai_deployment_name_selector = azure_open_ai_deployment_name_selector
self.ux_ignore = ux_ignore
self.anonymous = anonymous
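# Usage sketch (illustrative, not part of the generated code): because the
# enum types in this module derive from str, enum-typed fields accept either
# the enum member or its string value. The schema string below is hypothetical.
#
#     hint = UIParameterHint(
#         ui_widget_type="JsonEditor",  # or UIWidgetTypeEnum.JSON_EDITOR
#         json_editor=UIJsonEditor(json_schema='{"type": "object"}'),
#     )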
class UIPromptFlowConnectionSelector(msrest.serialization.Model):
"""UIPromptFlowConnectionSelector.
:ivar prompt_flow_connection_type:
:vartype prompt_flow_connection_type: str
"""
_attribute_map = {
'prompt_flow_connection_type': {'key': 'PromptFlowConnectionType', 'type': 'str'},
}
def __init__(
self,
*,
prompt_flow_connection_type: Optional[str] = None,
**kwargs
):
"""
:keyword prompt_flow_connection_type:
:paramtype prompt_flow_connection_type: str
"""
super(UIPromptFlowConnectionSelector, self).__init__(**kwargs)
self.prompt_flow_connection_type = prompt_flow_connection_type
class UIWidgetMetaInfo(msrest.serialization.Model):
"""UIWidgetMetaInfo.
:ivar module_node_id:
:vartype module_node_id: str
:ivar meta_module_id:
:vartype meta_module_id: str
:ivar parameter_name:
:vartype parameter_name: str
:ivar ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker", "Credential",
"Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter", "SectionToggle",
"YamlEditor", "EnableRuntimeSweep", "DataStoreSelection", "InstanceTypeSelection",
"ConnectionSelection", "PromptFlowConnectionSelection", "AzureOpenAIDeploymentNameSelection".
:vartype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
"""
_attribute_map = {
'module_node_id': {'key': 'moduleNodeId', 'type': 'str'},
'meta_module_id': {'key': 'metaModuleId', 'type': 'str'},
'parameter_name': {'key': 'parameterName', 'type': 'str'},
'ui_widget_type': {'key': 'uiWidgetType', 'type': 'str'},
}
def __init__(
self,
*,
module_node_id: Optional[str] = None,
meta_module_id: Optional[str] = None,
parameter_name: Optional[str] = None,
ui_widget_type: Optional[Union[str, "UIWidgetTypeEnum"]] = None,
**kwargs
):
"""
:keyword module_node_id:
:paramtype module_node_id: str
:keyword meta_module_id:
:paramtype meta_module_id: str
:keyword parameter_name:
:paramtype parameter_name: str
:keyword ui_widget_type: Possible values include: "Default", "Mode", "ColumnPicker",
"Credential", "Script", "ComputeSelection", "JsonEditor", "SearchSpaceParameter",
"SectionToggle", "YamlEditor", "EnableRuntimeSweep", "DataStoreSelection",
"InstanceTypeSelection", "ConnectionSelection", "PromptFlowConnectionSelection",
"AzureOpenAIDeploymentNameSelection".
:paramtype ui_widget_type: str or ~flow.models.UIWidgetTypeEnum
"""
super(UIWidgetMetaInfo, self).__init__(**kwargs)
self.module_node_id = module_node_id
self.meta_module_id = meta_module_id
self.parameter_name = parameter_name
self.ui_widget_type = ui_widget_type
class UIYamlEditor(msrest.serialization.Model):
"""UIYamlEditor.
:ivar json_schema:
:vartype json_schema: str
"""
_attribute_map = {
'json_schema': {'key': 'jsonSchema', 'type': 'str'},
}
def __init__(
self,
*,
json_schema: Optional[str] = None,
**kwargs
):
"""
:keyword json_schema:
:paramtype json_schema: str
"""
super(UIYamlEditor, self).__init__(**kwargs)
self.json_schema = json_schema
class UnversionedEntityRequestDto(msrest.serialization.Model):
"""UnversionedEntityRequestDto.
:ivar unversioned_entity_ids:
:vartype unversioned_entity_ids: list[str]
"""
_attribute_map = {
'unversioned_entity_ids': {'key': 'unversionedEntityIds', 'type': '[str]'},
}
def __init__(
self,
*,
unversioned_entity_ids: Optional[List[str]] = None,
**kwargs
):
"""
:keyword unversioned_entity_ids:
:paramtype unversioned_entity_ids: list[str]
"""
super(UnversionedEntityRequestDto, self).__init__(**kwargs)
self.unversioned_entity_ids = unversioned_entity_ids
class UnversionedEntityResponseDto(msrest.serialization.Model):
"""UnversionedEntityResponseDto.
:ivar unversioned_entities:
:vartype unversioned_entities: list[~flow.models.FlowIndexEntity]
:ivar unversioned_entity_json_schema: Anything.
:vartype unversioned_entity_json_schema: any
:ivar normalized_request_charge:
:vartype normalized_request_charge: float
:ivar normalized_request_charge_period:
:vartype normalized_request_charge_period: str
"""
_attribute_map = {
'unversioned_entities': {'key': 'unversionedEntities', 'type': '[FlowIndexEntity]'},
'unversioned_entity_json_schema': {'key': 'unversionedEntityJsonSchema', 'type': 'object'},
'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
}
def __init__(
self,
*,
unversioned_entities: Optional[List["FlowIndexEntity"]] = None,
unversioned_entity_json_schema: Optional[Any] = None,
normalized_request_charge: Optional[float] = None,
normalized_request_charge_period: Optional[str] = None,
**kwargs
):
"""
:keyword unversioned_entities:
:paramtype unversioned_entities: list[~flow.models.FlowIndexEntity]
:keyword unversioned_entity_json_schema: Anything.
:paramtype unversioned_entity_json_schema: any
:keyword normalized_request_charge:
:paramtype normalized_request_charge: float
:keyword normalized_request_charge_period:
:paramtype normalized_request_charge_period: str
"""
super(UnversionedEntityResponseDto, self).__init__(**kwargs)
self.unversioned_entities = unversioned_entities
self.unversioned_entity_json_schema = unversioned_entity_json_schema
self.normalized_request_charge = normalized_request_charge
self.normalized_request_charge_period = normalized_request_charge_period
class UnversionedRebuildIndexDto(msrest.serialization.Model):
"""UnversionedRebuildIndexDto.
:ivar continuation_token:
:vartype continuation_token: str
:ivar entity_count:
:vartype entity_count: int
:ivar entity_container_type:
:vartype entity_container_type: str
:ivar entity_type:
:vartype entity_type: str
:ivar resource_id:
:vartype resource_id: str
:ivar workspace_id:
:vartype workspace_id: str
:ivar immutable_resource_id:
:vartype immutable_resource_id: str
:ivar start_time:
:vartype start_time: ~datetime.datetime
:ivar end_time:
:vartype end_time: ~datetime.datetime
"""
_attribute_map = {
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'entity_count': {'key': 'entityCount', 'type': 'int'},
'entity_container_type': {'key': 'entityContainerType', 'type': 'str'},
'entity_type': {'key': 'entityType', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'immutable_resource_id': {'key': 'immutableResourceId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
continuation_token: Optional[str] = None,
entity_count: Optional[int] = None,
entity_container_type: Optional[str] = None,
entity_type: Optional[str] = None,
resource_id: Optional[str] = None,
workspace_id: Optional[str] = None,
immutable_resource_id: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
**kwargs
):
"""
:keyword continuation_token:
:paramtype continuation_token: str
:keyword entity_count:
:paramtype entity_count: int
:keyword entity_container_type:
:paramtype entity_container_type: str
:keyword entity_type:
:paramtype entity_type: str
:keyword resource_id:
:paramtype resource_id: str
:keyword workspace_id:
:paramtype workspace_id: str
:keyword immutable_resource_id:
:paramtype immutable_resource_id: str
:keyword start_time:
:paramtype start_time: ~datetime.datetime
:keyword end_time:
:paramtype end_time: ~datetime.datetime
"""
super(UnversionedRebuildIndexDto, self).__init__(**kwargs)
self.continuation_token = continuation_token
self.entity_count = entity_count
self.entity_container_type = entity_container_type
self.entity_type = entity_type
self.resource_id = resource_id
self.workspace_id = workspace_id
self.immutable_resource_id = immutable_resource_id
self.start_time = start_time
self.end_time = end_time
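# Usage sketch (illustrative): per _attribute_map, start_time/end_time are
# declared as 'iso-8601', so datetime values are serialized to ISO 8601
# strings. The entity_type value below is hypothetical.
#
#     import datetime
#     dto = UnversionedRebuildIndexDto(
#         entity_type="flows",
#         start_time=datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc),
#     )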
class UnversionedRebuildResponseDto(msrest.serialization.Model):
"""UnversionedRebuildResponseDto.
:ivar entities:
:vartype entities: ~flow.models.SegmentedResult1
:ivar unversioned_entity_schema: Anything.
:vartype unversioned_entity_schema: any
:ivar normalized_request_charge:
:vartype normalized_request_charge: float
:ivar normalized_request_charge_period:
:vartype normalized_request_charge_period: str
"""
_attribute_map = {
'entities': {'key': 'entities', 'type': 'SegmentedResult1'},
'unversioned_entity_schema': {'key': 'unversionedEntitySchema', 'type': 'object'},
'normalized_request_charge': {'key': 'normalizedRequestCharge', 'type': 'float'},
'normalized_request_charge_period': {'key': 'normalizedRequestChargePeriod', 'type': 'str'},
}
def __init__(
self,
*,
entities: Optional["SegmentedResult1"] = None,
unversioned_entity_schema: Optional[Any] = None,
normalized_request_charge: Optional[float] = None,
normalized_request_charge_period: Optional[str] = None,
**kwargs
):
"""
:keyword entities:
:paramtype entities: ~flow.models.SegmentedResult1
:keyword unversioned_entity_schema: Anything.
:paramtype unversioned_entity_schema: any
:keyword normalized_request_charge:
:paramtype normalized_request_charge: float
:keyword normalized_request_charge_period:
:paramtype normalized_request_charge_period: str
"""
super(UnversionedRebuildResponseDto, self).__init__(**kwargs)
self.entities = entities
self.unversioned_entity_schema = unversioned_entity_schema
self.normalized_request_charge = normalized_request_charge
self.normalized_request_charge_period = normalized_request_charge_period
class UpdateComponentRequest(msrest.serialization.Model):
"""UpdateComponentRequest.
:ivar display_name:
:vartype display_name: str
:ivar description:
:vartype description: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar module_update_operation_type: Possible values include: "SetDefaultVersion",
"EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
:vartype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
:ivar module_version:
:vartype module_version: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'module_update_operation_type': {'key': 'moduleUpdateOperationType', 'type': 'str'},
'module_version': {'key': 'moduleVersion', 'type': 'str'},
}
def __init__(
self,
*,
display_name: Optional[str] = None,
description: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
module_update_operation_type: Optional[Union[str, "ModuleUpdateOperationType"]] = None,
module_version: Optional[str] = None,
**kwargs
):
"""
:keyword display_name:
:paramtype display_name: str
:keyword description:
:paramtype description: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword module_update_operation_type: Possible values include: "SetDefaultVersion",
"EnableModule", "DisableModule", "UpdateDisplayName", "UpdateDescription", "UpdateTags".
:paramtype module_update_operation_type: str or ~flow.models.ModuleUpdateOperationType
:keyword module_version:
:paramtype module_version: str
"""
super(UpdateComponentRequest, self).__init__(**kwargs)
self.display_name = display_name
self.description = description
self.tags = tags
self.module_update_operation_type = module_update_operation_type
self.module_version = module_version
class UpdateFlowRequest(msrest.serialization.Model):
"""UpdateFlowRequest.
:ivar flow_run_result:
:vartype flow_run_result: ~flow.models.FlowRunResult
:ivar flow_test_mode: Possible values include: "Sync", "Async".
:vartype flow_test_mode: str or ~flow.models.FlowTestMode
:ivar flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:vartype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:ivar flow_name:
:vartype flow_name: str
:ivar description:
:vartype description: str
:ivar details:
:vartype details: str
:ivar tags: A set of tags. Dictionary of :code:`<string>`.
:vartype tags: dict[str, str]
:ivar flow:
:vartype flow: ~flow.models.Flow
:ivar flow_definition_file_path:
:vartype flow_definition_file_path: str
:ivar flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:vartype flow_type: str or ~flow.models.FlowType
:ivar flow_run_settings:
:vartype flow_run_settings: ~flow.models.FlowRunSettings
:ivar is_archived:
:vartype is_archived: bool
:ivar vm_size:
:vartype vm_size: str
:ivar max_idle_time_seconds:
:vartype max_idle_time_seconds: long
:ivar identity:
:vartype identity: str
"""
_attribute_map = {
'flow_run_result': {'key': 'flowRunResult', 'type': 'FlowRunResult'},
'flow_test_mode': {'key': 'flowTestMode', 'type': 'str'},
'flow_test_infos': {'key': 'flowTestInfos', 'type': '{FlowTestInfo}'},
'flow_name': {'key': 'flowName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'flow': {'key': 'flow', 'type': 'Flow'},
'flow_definition_file_path': {'key': 'flowDefinitionFilePath', 'type': 'str'},
'flow_type': {'key': 'flowType', 'type': 'str'},
'flow_run_settings': {'key': 'flowRunSettings', 'type': 'FlowRunSettings'},
'is_archived': {'key': 'isArchived', 'type': 'bool'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'max_idle_time_seconds': {'key': 'maxIdleTimeSeconds', 'type': 'long'},
'identity': {'key': 'identity', 'type': 'str'},
}
def __init__(
self,
*,
flow_run_result: Optional["FlowRunResult"] = None,
flow_test_mode: Optional[Union[str, "FlowTestMode"]] = None,
flow_test_infos: Optional[Dict[str, "FlowTestInfo"]] = None,
flow_name: Optional[str] = None,
description: Optional[str] = None,
details: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
flow: Optional["Flow"] = None,
flow_definition_file_path: Optional[str] = None,
flow_type: Optional[Union[str, "FlowType"]] = None,
flow_run_settings: Optional["FlowRunSettings"] = None,
is_archived: Optional[bool] = None,
vm_size: Optional[str] = None,
max_idle_time_seconds: Optional[int] = None,
identity: Optional[str] = None,
**kwargs
):
"""
:keyword flow_run_result:
:paramtype flow_run_result: ~flow.models.FlowRunResult
:keyword flow_test_mode: Possible values include: "Sync", "Async".
:paramtype flow_test_mode: str or ~flow.models.FlowTestMode
:keyword flow_test_infos: Dictionary of :code:`<FlowTestInfo>`.
:paramtype flow_test_infos: dict[str, ~flow.models.FlowTestInfo]
:keyword flow_name:
:paramtype flow_name: str
:keyword description:
:paramtype description: str
:keyword details:
:paramtype details: str
:keyword tags: A set of tags. Dictionary of :code:`<string>`.
:paramtype tags: dict[str, str]
:keyword flow:
:paramtype flow: ~flow.models.Flow
:keyword flow_definition_file_path:
:paramtype flow_definition_file_path: str
:keyword flow_type: Possible values include: "Default", "Evaluation", "Chat", "Rag".
:paramtype flow_type: str or ~flow.models.FlowType
:keyword flow_run_settings:
:paramtype flow_run_settings: ~flow.models.FlowRunSettings
:keyword is_archived:
:paramtype is_archived: bool
:keyword vm_size:
:paramtype vm_size: str
:keyword max_idle_time_seconds:
:paramtype max_idle_time_seconds: long
:keyword identity:
:paramtype identity: str
"""
super(UpdateFlowRequest, self).__init__(**kwargs)
self.flow_run_result = flow_run_result
self.flow_test_mode = flow_test_mode
self.flow_test_infos = flow_test_infos
self.flow_name = flow_name
self.description = description
self.details = details
self.tags = tags
self.flow = flow
self.flow_definition_file_path = flow_definition_file_path
self.flow_type = flow_type
self.flow_run_settings = flow_run_settings
self.is_archived = is_archived
self.vm_size = vm_size
self.max_idle_time_seconds = max_idle_time_seconds
self.identity = identity
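# Usage sketch (illustrative): all fields are optional keyword arguments, so a
# partial update payload can set only the fields being changed. The values
# below are hypothetical.
#
#     request = UpdateFlowRequest(
#         flow_name="my-flow",
#         description="updated description",
#         tags={"team": "prompt-eng"},
#         is_archived=False,
#     )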
class UpdateFlowRuntimeRequest(msrest.serialization.Model):
"""UpdateFlowRuntimeRequest.
:ivar runtime_description:
:vartype runtime_description: str
:ivar environment:
:vartype environment: str
:ivar instance_count:
:vartype instance_count: int
"""
_attribute_map = {
'runtime_description': {'key': 'runtimeDescription', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'str'},
'instance_count': {'key': 'instanceCount', 'type': 'int'},
}
def __init__(
self,
*,
runtime_description: Optional[str] = None,
environment: Optional[str] = None,
instance_count: Optional[int] = None,
**kwargs
):
"""
:keyword runtime_description:
:paramtype runtime_description: str
:keyword environment:
:paramtype environment: str
:keyword instance_count:
:paramtype instance_count: int
"""
super(UpdateFlowRuntimeRequest, self).__init__(**kwargs)
self.runtime_description = runtime_description
self.environment = environment
self.instance_count = instance_count
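# Usage sketch (illustrative): the client serializes this model using the JSON
# keys declared in _attribute_map (e.g. 'runtimeDescription'). The environment
# reference below is hypothetical.
#
#     request = UpdateFlowRuntimeRequest(
#         runtime_description="nightly eval runtime",
#         environment="azureml:my-env:1",
#         instance_count=2,
#     )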
class UpdateRegistryComponentRequest(msrest.serialization.Model):
"""UpdateRegistryComponentRequest.
:ivar registry_name:
:vartype registry_name: str
:ivar component_name:
:vartype component_name: str
:ivar component_version:
:vartype component_version: str
:ivar update_type: The only acceptable values to pass in are None and "SetDefaultVersion". The
default value is None.
:vartype update_type: str
"""
_attribute_map = {
'registry_name': {'key': 'registryName', 'type': 'str'},
'component_name': {'key': 'componentName', 'type': 'str'},
'component_version': {'key': 'componentVersion', 'type': 'str'},
'update_type': {'key': 'updateType', 'type': 'str'},
}
def __init__(
self,
*,
registry_name: Optional[str] = None,
component_name: Optional[str] = None,
component_version: Optional[str] = None,
update_type: Optional[str] = None,
**kwargs
):
"""
:keyword registry_name:
:paramtype registry_name: str
:keyword component_name:
:paramtype component_name: str
:keyword component_version:
:paramtype component_version: str
:keyword update_type: The only acceptable values to pass in are None and "SetDefaultVersion".
The default value is None.
:paramtype update_type: str
"""
super(UpdateRegistryComponentRequest, self).__init__(**kwargs)
self.registry_name = registry_name
self.component_name = component_name
self.component_version = component_version
self.update_type = update_type
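# Usage sketch (illustrative): as documented above, update_type accepts only
# None or "SetDefaultVersion". The registry and component names below are
# hypothetical.
#
#     request = UpdateRegistryComponentRequest(
#         registry_name="my-registry",
#         component_name="my-component",
#         component_version="2",
#         update_type="SetDefaultVersion",
#     )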
class UploadOptions(msrest.serialization.Model):
"""UploadOptions.
:ivar overwrite:
:vartype overwrite: bool
:ivar source_globs:
:vartype source_globs: ~flow.models.ExecutionGlobsOptions
"""
_attribute_map = {
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'source_globs': {'key': 'sourceGlobs', 'type': 'ExecutionGlobsOptions'},
}
def __init__(
self,
*,
overwrite: Optional[bool] = None,
source_globs: Optional["ExecutionGlobsOptions"] = None,
**kwargs
):
"""
:keyword overwrite:
:paramtype overwrite: bool
:keyword source_globs:
:paramtype source_globs: ~flow.models.ExecutionGlobsOptions
"""
super(UploadOptions, self).__init__(**kwargs)
self.overwrite = overwrite
self.source_globs = source_globs
class UriReference(msrest.serialization.Model):
"""UriReference.
:ivar path:
:vartype path: str
:ivar is_file:
:vartype is_file: bool
"""
_attribute_map = {
'path': {'key': 'path', 'type': 'str'},
'is_file': {'key': 'isFile', 'type': 'bool'},
}
def __init__(
self,
*,
path: Optional[str] = None,
is_file: Optional[bool] = None,
**kwargs
):
"""
:keyword path:
:paramtype path: str
:keyword is_file:
:paramtype is_file: bool
"""
super(UriReference, self).__init__(**kwargs)
self.path = path
self.is_file = is_file
class User(msrest.serialization.Model):
"""User.
:ivar user_object_id: A user or service principal's object ID.
This is EUPI and may only be logged to warm path telemetry.
:vartype user_object_id: str
:ivar user_pu_id: A user or service principal's PuID.
This is PII and should never be logged.
:vartype user_pu_id: str
    :ivar user_idp: A user's identity provider, e.g. live.com.
This is PII and should never be logged.
:vartype user_idp: str
    :ivar user_alt_sec_id: A user's alternate security ID. This represents the user in a
     different identity provider system, e.g. 1:live.com:puid.
This is PII and should never be logged.
:vartype user_alt_sec_id: str
    :ivar user_iss: The issuer which issued the token for this user.
This is PII and should never be logged.
:vartype user_iss: str
:ivar user_tenant_id: A user or service principal's tenant ID.
:vartype user_tenant_id: str
:ivar user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged.
:vartype user_name: str
    :ivar upn: A user's principal name (UPN).
     This is PII and should never be logged.
:vartype upn: str
"""
_attribute_map = {
'user_object_id': {'key': 'userObjectId', 'type': 'str'},
'user_pu_id': {'key': 'userPuId', 'type': 'str'},
'user_idp': {'key': 'userIdp', 'type': 'str'},
'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'},
'user_iss': {'key': 'userIss', 'type': 'str'},
'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'upn': {'key': 'upn', 'type': 'str'},
}
def __init__(
self,
*,
user_object_id: Optional[str] = None,
user_pu_id: Optional[str] = None,
user_idp: Optional[str] = None,
user_alt_sec_id: Optional[str] = None,
user_iss: Optional[str] = None,
user_tenant_id: Optional[str] = None,
user_name: Optional[str] = None,
upn: Optional[str] = None,
**kwargs
):
"""
:keyword user_object_id: A user or service principal's object ID.
This is EUPI and may only be logged to warm path telemetry.
:paramtype user_object_id: str
:keyword user_pu_id: A user or service principal's PuID.
This is PII and should never be logged.
:paramtype user_pu_id: str
        :keyword user_idp: A user's identity provider, e.g. live.com.
This is PII and should never be logged.
:paramtype user_idp: str
        :keyword user_alt_sec_id: A user's alternate security ID. This represents the user in a
         different identity provider system, e.g. 1:live.com:puid.
This is PII and should never be logged.
:paramtype user_alt_sec_id: str
        :keyword user_iss: The issuer which issued the token for this user.
This is PII and should never be logged.
:paramtype user_iss: str
:keyword user_tenant_id: A user or service principal's tenant ID.
:paramtype user_tenant_id: str
:keyword user_name: A user's full name or a service principal's app ID.
This is PII and should never be logged.
:paramtype user_name: str
        :keyword upn: A user's principal name (UPN).
         This is PII and should never be logged.
:paramtype upn: str
"""
super(User, self).__init__(**kwargs)
self.user_object_id = user_object_id
self.user_pu_id = user_pu_id
self.user_idp = user_idp
self.user_alt_sec_id = user_alt_sec_id
self.user_iss = user_iss
self.user_tenant_id = user_tenant_id
self.user_name = user_name
self.upn = upn
class UserAssignedIdentity(msrest.serialization.Model):
"""UserAssignedIdentity.
:ivar principal_id:
:vartype principal_id: str
:ivar client_id:
:vartype client_id: str
"""
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
*,
principal_id: Optional[str] = None,
client_id: Optional[str] = None,
**kwargs
):
"""
:keyword principal_id:
:paramtype principal_id: str
:keyword client_id:
:paramtype client_id: str
"""
super(UserAssignedIdentity, self).__init__(**kwargs)
self.principal_id = principal_id
self.client_id = client_id
class ValidationDataSettings(msrest.serialization.Model):
"""ValidationDataSettings.
:ivar n_cross_validations:
:vartype n_cross_validations: ~flow.models.NCrossValidations
:ivar validation_data_size:
:vartype validation_data_size: float
:ivar cv_split_column_names:
:vartype cv_split_column_names: list[str]
:ivar validation_type:
:vartype validation_type: str
"""
_attribute_map = {
'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'},
'validation_data_size': {'key': 'validationDataSize', 'type': 'float'},
'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'},
'validation_type': {'key': 'validationType', 'type': 'str'},
}
def __init__(
self,
*,
n_cross_validations: Optional["NCrossValidations"] = None,
validation_data_size: Optional[float] = None,
cv_split_column_names: Optional[List[str]] = None,
validation_type: Optional[str] = None,
**kwargs
):
"""
:keyword n_cross_validations:
:paramtype n_cross_validations: ~flow.models.NCrossValidations
:keyword validation_data_size:
:paramtype validation_data_size: float
:keyword cv_split_column_names:
:paramtype cv_split_column_names: list[str]
:keyword validation_type:
:paramtype validation_type: str
"""
super(ValidationDataSettings, self).__init__(**kwargs)
self.n_cross_validations = n_cross_validations
self.validation_data_size = validation_data_size
self.cv_split_column_names = cv_split_column_names
self.validation_type = validation_type
class VariantNode(msrest.serialization.Model):
"""VariantNode.
:ivar node:
:vartype node: ~flow.models.Node
:ivar description:
:vartype description: str
"""
_attribute_map = {
'node': {'key': 'node', 'type': 'Node'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
node: Optional["Node"] = None,
description: Optional[str] = None,
**kwargs
):
"""
:keyword node:
:paramtype node: ~flow.models.Node
:keyword description:
:paramtype description: str
"""
super(VariantNode, self).__init__(**kwargs)
self.node = node
self.description = description
class Webhook(msrest.serialization.Model):
"""Webhook.
:ivar webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The
default value is None.
:vartype webhook_type: str
:ivar event_type:
:vartype event_type: str
"""
_attribute_map = {
'webhook_type': {'key': 'webhookType', 'type': 'str'},
'event_type': {'key': 'eventType', 'type': 'str'},
}
def __init__(
self,
*,
webhook_type: Optional[str] = None,
event_type: Optional[str] = None,
**kwargs
):
"""
:keyword webhook_type: The only acceptable values to pass in are None and "AzureDevOps". The
default value is None.
:paramtype webhook_type: str
:keyword event_type:
:paramtype event_type: str
"""
super(Webhook, self).__init__(**kwargs)
self.webhook_type = webhook_type
self.event_type = event_type
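# Usage sketch (illustrative): webhook_type accepts only None or "AzureDevOps"
# per the docstring; the event_type value below is hypothetical.
#
#     webhook = Webhook(webhook_type="AzureDevOps", event_type="RunCompleted")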
class WebServiceComputeMetaInfo(msrest.serialization.Model):
"""WebServiceComputeMetaInfo.
:ivar node_count:
:vartype node_count: int
:ivar is_ssl_enabled:
:vartype is_ssl_enabled: bool
:ivar aks_not_found:
:vartype aks_not_found: bool
:ivar cluster_purpose:
:vartype cluster_purpose: str
:ivar public_ip_address:
:vartype public_ip_address: str
:ivar vm_size:
:vartype vm_size: str
:ivar location:
:vartype location: str
:ivar provisioning_state:
:vartype provisioning_state: str
:ivar state:
:vartype state: str
:ivar os_type:
:vartype os_type: str
:ivar id:
:vartype id: str
:ivar name:
:vartype name: str
:ivar created_by_studio:
:vartype created_by_studio: bool
:ivar is_gpu_type:
:vartype is_gpu_type: bool
:ivar resource_id:
:vartype resource_id: str
:ivar compute_type:
:vartype compute_type: str
"""
_attribute_map = {
'node_count': {'key': 'nodeCount', 'type': 'int'},
'is_ssl_enabled': {'key': 'isSslEnabled', 'type': 'bool'},
'aks_not_found': {'key': 'aksNotFound', 'type': 'bool'},
'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
'vm_size': {'key': 'vmSize', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'os_type': {'key': 'osType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'created_by_studio': {'key': 'createdByStudio', 'type': 'bool'},
'is_gpu_type': {'key': 'isGpuType', 'type': 'bool'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'compute_type': {'key': 'computeType', 'type': 'str'},
}
def __init__(
self,
*,
node_count: Optional[int] = None,
is_ssl_enabled: Optional[bool] = None,
aks_not_found: Optional[bool] = None,
cluster_purpose: Optional[str] = None,
public_ip_address: Optional[str] = None,
vm_size: Optional[str] = None,
location: Optional[str] = None,
provisioning_state: Optional[str] = None,
state: Optional[str] = None,
os_type: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
created_by_studio: Optional[bool] = None,
is_gpu_type: Optional[bool] = None,
resource_id: Optional[str] = None,
compute_type: Optional[str] = None,
**kwargs
):
"""
:keyword node_count:
:paramtype node_count: int
:keyword is_ssl_enabled:
:paramtype is_ssl_enabled: bool
:keyword aks_not_found:
:paramtype aks_not_found: bool
:keyword cluster_purpose:
:paramtype cluster_purpose: str
:keyword public_ip_address:
:paramtype public_ip_address: str
:keyword vm_size:
:paramtype vm_size: str
:keyword location:
:paramtype location: str
:keyword provisioning_state:
:paramtype provisioning_state: str
:keyword state:
:paramtype state: str
:keyword os_type:
:paramtype os_type: str
:keyword id:
:paramtype id: str
:keyword name:
:paramtype name: str
:keyword created_by_studio:
:paramtype created_by_studio: bool
:keyword is_gpu_type:
:paramtype is_gpu_type: bool
:keyword resource_id:
:paramtype resource_id: str
:keyword compute_type:
:paramtype compute_type: str
"""
super(WebServiceComputeMetaInfo, self).__init__(**kwargs)
self.node_count = node_count
self.is_ssl_enabled = is_ssl_enabled
self.aks_not_found = aks_not_found
self.cluster_purpose = cluster_purpose
self.public_ip_address = public_ip_address
self.vm_size = vm_size
self.location = location
self.provisioning_state = provisioning_state
self.state = state
self.os_type = os_type
self.id = id
self.name = name
self.created_by_studio = created_by_studio
self.is_gpu_type = is_gpu_type
self.resource_id = resource_id
self.compute_type = compute_type
class WebServicePort(msrest.serialization.Model):
"""WebServicePort.
:ivar node_id:
:vartype node_id: str
:ivar port_name:
:vartype port_name: str
:ivar name:
:vartype name: str
"""
_attribute_map = {
'node_id': {'key': 'nodeId', 'type': 'str'},
'port_name': {'key': 'portName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
node_id: Optional[str] = None,
port_name: Optional[str] = None,
name: Optional[str] = None,
**kwargs
):
"""
:keyword node_id:
:paramtype node_id: str
:keyword port_name:
:paramtype port_name: str
:keyword name:
:paramtype name: str
"""
super(WebServicePort, self).__init__(**kwargs)
self.node_id = node_id
self.port_name = port_name
self.name = name
class WorkspaceConnectionSpec(msrest.serialization.Model):
"""WorkspaceConnectionSpec.
:ivar connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3",
"Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb",
"AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService",
"CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi",
"AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi",
"AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2",
"Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb",
"MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto",
"SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata",
"Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible",
"FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp",
"GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics",
"DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo",
"Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce",
"SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc",
"ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho",
"GenericContainerRegistry".
:vartype connection_category: str or ~flow.models.ConnectionCategory
:ivar flow_value_type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:vartype flow_value_type: str or ~flow.models.ValueType
:ivar connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:vartype connection_type: str or ~flow.models.ConnectionType
:ivar connection_type_display_name:
:vartype connection_type_display_name: str
:ivar config_specs:
:vartype config_specs: list[~flow.models.ConnectionConfigSpec]
:ivar module:
:vartype module: str
"""
_attribute_map = {
'connection_category': {'key': 'connectionCategory', 'type': 'str'},
'flow_value_type': {'key': 'flowValueType', 'type': 'str'},
'connection_type': {'key': 'connectionType', 'type': 'str'},
'connection_type_display_name': {'key': 'connectionTypeDisplayName', 'type': 'str'},
'config_specs': {'key': 'configSpecs', 'type': '[ConnectionConfigSpec]'},
'module': {'key': 'module', 'type': 'str'},
}
def __init__(
self,
*,
connection_category: Optional[Union[str, "ConnectionCategory"]] = None,
flow_value_type: Optional[Union[str, "ValueType"]] = None,
connection_type: Optional[Union[str, "ConnectionType"]] = None,
connection_type_display_name: Optional[str] = None,
config_specs: Optional[List["ConnectionConfigSpec"]] = None,
module: Optional[str] = None,
**kwargs
):
"""
:keyword connection_category: Possible values include: "PythonFeed", "ACR", "Git", "S3",
"Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb",
"AzureDataLakeGen2", "Redis", "ApiKey", "AzureOpenAI", "CognitiveSearch", "CognitiveService",
"CustomKeys", "AzureBlob", "AzureOneLake", "CosmosDb", "CosmosDbMongoDbApi",
"AzureDataExplorer", "AzureMariaDb", "AzureDatabricksDeltaLake", "AzureSqlMi",
"AzureTableStorage", "AmazonRdsForOracle", "AmazonRdsForSqlServer", "AmazonRedshift", "Db2",
"Drill", "GoogleBigQuery", "Greenplum", "Hbase", "Hive", "Impala", "Informix", "MariaDb",
"MicrosoftAccess", "MySql", "Netezza", "Oracle", "Phoenix", "PostgreSql", "Presto",
"SapOpenHub", "SapBw", "SapHana", "SapTable", "Spark", "SqlServer", "Sybase", "Teradata",
"Vertica", "Cassandra", "Couchbase", "MongoDbV2", "MongoDbAtlas", "AmazonS3Compatible",
"FileServer", "FtpServer", "GoogleCloudStorage", "Hdfs", "OracleCloudStorage", "Sftp",
"GenericHttp", "ODataRest", "Odbc", "GenericRest", "AmazonMws", "Concur", "Dynamics",
"DynamicsAx", "DynamicsCrm", "GoogleAdWords", "Hubspot", "Jira", "Magento", "Marketo",
"Office365", "Eloqua", "Responsys", "OracleServiceCloud", "PayPal", "QuickBooks", "Salesforce",
"SalesforceServiceCloud", "SalesforceMarketingCloud", "SapCloudForCustomer", "SapEcc",
"ServiceNow", "SharePointOnlineList", "Shopify", "Square", "WebTable", "Xero", "Zoho",
"GenericContainerRegistry".
:paramtype connection_category: str or ~flow.models.ConnectionCategory
:keyword flow_value_type: Possible values include: "int", "double", "bool", "string", "secret",
"prompt_template", "object", "list", "BingConnection", "OpenAIConnection",
"AzureOpenAIConnection", "AzureContentModeratorConnection", "CustomConnection",
"AzureContentSafetyConnection", "SerpConnection", "CognitiveSearchConnection",
"SubstrateLLMConnection", "PineconeConnection", "QdrantConnection", "WeaviateConnection",
"function_list", "function_str", "FormRecognizerConnection", "file_path", "image".
:paramtype flow_value_type: str or ~flow.models.ValueType
:keyword connection_type: Possible values include: "OpenAI", "AzureOpenAI", "Serp", "Bing",
"AzureContentModerator", "Custom", "AzureContentSafety", "CognitiveSearch", "SubstrateLLM",
"Pinecone", "Qdrant", "Weaviate", "FormRecognizer".
:paramtype connection_type: str or ~flow.models.ConnectionType
:keyword connection_type_display_name:
:paramtype connection_type_display_name: str
:keyword config_specs:
:paramtype config_specs: list[~flow.models.ConnectionConfigSpec]
:keyword module:
:paramtype module: str
"""
super(WorkspaceConnectionSpec, self).__init__(**kwargs)
self.connection_category = connection_category
self.flow_value_type = flow_value_type
self.connection_type = connection_type
self.connection_type_display_name = connection_type_display_name
self.config_specs = config_specs
self.module = module
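# Usage sketch (illustrative): enum-typed fields accept the enum member or its
# string value; config_specs takes a list of ConnectionConfigSpec models.
#
#     spec = WorkspaceConnectionSpec(
#         connection_category="AzureOpenAI",  # or ConnectionCategory.AZURE_OPEN_AI
#         connection_type="AzureOpenAI",      # or ConnectionType.AZURE_OPEN_AI
#         connection_type_display_name="Azure OpenAI",
#     )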
# File: promptflow_repo/promptflow/src/promptflow/promptflow/azure/_restclient/flow/models/_azure_machine_learning_designer_service_client_enums.py
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.8.0, generator: @autorest/[email protected])
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum
from six import with_metaclass
from azure.core import CaseInsensitiveEnumMeta
class ActionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SEND_VALIDATION_REQUEST = "SendValidationRequest"
GET_VALIDATION_STATUS = "GetValidationStatus"
SUBMIT_BULK_RUN = "SubmitBulkRun"
LOG_RUN_RESULT = "LogRunResult"
LOG_RUN_TERMINATED_EVENT = "LogRunTerminatedEvent"
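# Note (illustrative, assuming azure.core's CaseInsensitiveEnumMeta behavior):
# member lookup by name is case-insensitive, and str subclassing makes members
# compare equal to their string values. This applies to every enum below.
#
#     assert ActionType["submit_bulk_run"] is ActionType.SUBMIT_BULK_RUN
#     assert ActionType.SUBMIT_BULK_RUN == "SubmitBulkRun"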
class AetherArgumentValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
LITERAL = "Literal"
PARAMETER = "Parameter"
INPUT = "Input"
OUTPUT = "Output"
NESTED_LIST = "NestedList"
STRING_INTERPOLATION_LIST = "StringInterpolationList"
class AetherAssetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
URI_FILE = "UriFile"
URI_FOLDER = "UriFolder"
ML_TABLE = "MLTable"
CUSTOM_MODEL = "CustomModel"
ML_FLOW_MODEL = "MLFlowModel"
TRITON_MODEL = "TritonModel"
OPEN_AI_MODEL = "OpenAIModel"
class AetherBuildSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CLOUD_BUILD = "CloudBuild"
VSO = "Vso"
VSO_GIT = "VsoGit"
class AetherComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
BATCH_AI = "BatchAi"
MLC = "MLC"
HDI_CLUSTER = "HdiCluster"
REMOTE_DOCKER = "RemoteDocker"
DATABRICKS = "Databricks"
AISC = "Aisc"
class AetherControlFlowType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
DO_WHILE = "DoWhile"
PARALLEL_FOR = "ParallelFor"
class AetherControlInputValue(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
FALSE = "False"
TRUE = "True"
SKIPPED = "Skipped"
class AetherDataCopyMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MERGE_WITH_OVERWRITE = "MergeWithOverwrite"
FAIL_IF_CONFLICT = "FailIfConflict"
class AetherDataLocationStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
COSMOS = "Cosmos"
AZURE_BLOB = "AzureBlob"
ARTIFACT = "Artifact"
SNAPSHOT = "Snapshot"
SAVED_AML_DATASET = "SavedAmlDataset"
ASSET = "Asset"
class AetherDataReferenceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
AZURE_BLOB = "AzureBlob"
AZURE_DATA_LAKE = "AzureDataLake"
AZURE_FILES = "AzureFiles"
COSMOS = "Cosmos"
PHILLY_HDFS = "PhillyHdfs"
AZURE_SQL_DATABASE = "AzureSqlDatabase"
AZURE_POSTGRES_DATABASE = "AzurePostgresDatabase"
AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2"
DBFS = "DBFS"
AZURE_MY_SQL_DATABASE = "AzureMySqlDatabase"
CUSTOM = "Custom"
HDFS = "Hdfs"
class AetherDatasetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
FILE = "File"
TABULAR = "Tabular"
class AetherDataStoreMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
MOUNT = "Mount"
DOWNLOAD = "Download"
UPLOAD = "Upload"
DIRECT = "Direct"
HDFS = "Hdfs"
LINK = "Link"
class AetherDataTransferStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DATA_BASE = "DataBase"
FILE_SYSTEM = "FileSystem"
class AetherDataTransferTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
IMPORT_DATA = "ImportData"
EXPORT_DATA = "ExportData"
COPY_DATA = "CopyData"
class AetherEarlyTerminationPolicyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
BANDIT = "Bandit"
MEDIAN_STOPPING = "MedianStopping"
TRUNCATION_SELECTION = "TruncationSelection"
class AetherEntityStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ACTIVE = "Active"
DEPRECATED = "Deprecated"
DISABLED = "Disabled"
class AetherExecutionEnvironment(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
EXE_WORKER_MACHINE = "ExeWorkerMachine"
DOCKER_CONTAINER_WITHOUT_NETWORK = "DockerContainerWithoutNetwork"
DOCKER_CONTAINER_WITH_NETWORK = "DockerContainerWithNetwork"
HYPER_V_WITHOUT_NETWORK = "HyperVWithoutNetwork"
HYPER_V_WITH_NETWORK = "HyperVWithNetwork"
class AetherExecutionPhase(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
EXECUTION = "Execution"
INITIALIZATION = "Initialization"
FINALIZATION = "Finalization"
class AetherFeaturizationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
OFF = "Off"
class AetherFileBasedPathType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNKNOWN = "Unknown"
FILE = "File"
FOLDER = "Folder"
class AetherForecastHorizonMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class AetherIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
USER_IDENTITY = "UserIdentity"
MANAGED = "Managed"
AML_TOKEN = "AMLToken"
class AetherLogVerbosity(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_SET = "NotSet"
DEBUG = "Debug"
INFO = "Info"
WARNING = "Warning"
ERROR = "Error"
CRITICAL = "Critical"
class AetherModuleDeploymentSource(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CLIENT = "Client"
AUTO_DEPLOYMENT = "AutoDeployment"
VSTS = "Vsts"
class AetherModuleHashVersion(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
IDENTIFIER_HASH = "IdentifierHash"
IDENTIFIER_HASH_V2 = "IdentifierHashV2"
class AetherModuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
BATCH_INFERENCING = "BatchInferencing"
class AetherNCrossValidationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class AetherParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INT = "Int"
DOUBLE = "Double"
BOOL = "Bool"
STRING = "String"
UNDEFINED = "Undefined"
class AetherParameterValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
LITERAL = "Literal"
GRAPH_PARAMETER_NAME = "GraphParameterName"
CONCATENATE = "Concatenate"
INPUT = "Input"
DATA_PATH = "DataPath"
DATA_SET_DEFINITION = "DataSetDefinition"
class AetherPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUC_WEIGHTED = "AUCWeighted"
ACCURACY = "Accuracy"
NORM_MACRO_RECALL = "NormMacroRecall"
AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted"
PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted"
SPEARMAN_CORRELATION = "SpearmanCorrelation"
NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError"
R2_SCORE = "R2Score"
NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError"
NORMALIZED_ROOT_MEAN_SQUARED_LOG_ERROR = "NormalizedRootMeanSquaredLogError"
MEAN_AVERAGE_PRECISION = "MeanAveragePrecision"
IOU = "Iou"
class AetherRepositoryType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
OTHER = "Other"
GIT = "Git"
SOURCE_DEPOT = "SourceDepot"
COSMOS = "Cosmos"
class AetherResourceOperator(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
EQUAL = "Equal"
CONTAIN = "Contain"
GREATER_OR_EQUAL = "GreaterOrEqual"
class AetherResourceValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
STRING = "String"
DOUBLE = "Double"
class AetherSamplingAlgorithmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
RANDOM = "Random"
GRID = "Grid"
BAYESIAN = "Bayesian"
class AetherSeasonalityMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class AetherShortSeriesHandlingConfiguration(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
PAD = "Pad"
DROP = "Drop"
class AetherStackMetaLearnerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
LOGISTIC_REGRESSION = "LogisticRegression"
LOGISTIC_REGRESSION_CV = "LogisticRegressionCV"
LIGHT_GBM_CLASSIFIER = "LightGBMClassifier"
ELASTIC_NET = "ElasticNet"
ELASTIC_NET_CV = "ElasticNetCV"
LIGHT_GBM_REGRESSOR = "LightGBMRegressor"
LINEAR_REGRESSION = "LinearRegression"
class AetherStoredProcedureParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
STRING = "String"
INT = "Int"
DECIMAL = "Decimal"
GUID = "Guid"
BOOLEAN = "Boolean"
DATE = "Date"
class AetherTabularTrainingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DISTRIBUTED = "Distributed"
NON_DISTRIBUTED = "NonDistributed"
AUTO = "Auto"
class AetherTargetAggregationFunction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SUM = "Sum"
MAX = "Max"
MIN = "Min"
MEAN = "Mean"
class AetherTargetLagsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class AetherTargetRollingWindowSizeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class AetherTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CLASSIFICATION = "Classification"
REGRESSION = "Regression"
FORECASTING = "Forecasting"
IMAGE_CLASSIFICATION = "ImageClassification"
IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel"
IMAGE_OBJECT_DETECTION = "ImageObjectDetection"
IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation"
TEXT_CLASSIFICATION = "TextClassification"
TEXT_MULTI_LABELING = "TextMultiLabeling"
TEXT_NER = "TextNER"
TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel"
class AetherTrainingOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
METRICS = "Metrics"
MODEL = "Model"
class AetherUIScriptLanguageEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
PYTHON = "Python"
R = "R"
JSON = "Json"
SQL = "Sql"
class AetherUIWidgetTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT = "Default"
MODE = "Mode"
COLUMN_PICKER = "ColumnPicker"
CREDENTIAL = "Credential"
SCRIPT = "Script"
COMPUTE_SELECTION = "ComputeSelection"
JSON_EDITOR = "JsonEditor"
SEARCH_SPACE_PARAMETER = "SearchSpaceParameter"
SECTION_TOGGLE = "SectionToggle"
YAML_EDITOR = "YamlEditor"
ENABLE_RUNTIME_SWEEP = "EnableRuntimeSweep"
DATA_STORE_SELECTION = "DataStoreSelection"
INSTANCE_TYPE_SELECTION = "InstanceTypeSelection"
CONNECTION_SELECTION = "ConnectionSelection"
PROMPT_FLOW_CONNECTION_SELECTION = "PromptFlowConnectionSelection"
AZURE_OPEN_AI_DEPLOYMENT_NAME_SELECTION = "AzureOpenAIDeploymentNameSelection"
class AetherUploadState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UPLOADING = "Uploading"
COMPLETED = "Completed"
CANCELED = "Canceled"
FAILED = "Failed"
class AetherUseStl(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SEASON = "Season"
SEASON_TREND = "SeasonTrend"
class AEVAAssetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
URI_FILE = "UriFile"
URI_FOLDER = "UriFolder"
ML_TABLE = "MLTable"
CUSTOM_MODEL = "CustomModel"
ML_FLOW_MODEL = "MLFlowModel"
TRITON_MODEL = "TritonModel"
OPEN_AI_MODEL = "OpenAIModel"
class AEVADataStoreMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
MOUNT = "Mount"
DOWNLOAD = "Download"
UPLOAD = "Upload"
DIRECT = "Direct"
HDFS = "Hdfs"
LINK = "Link"
class AEVAIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
USER_IDENTITY = "UserIdentity"
MANAGED = "Managed"
AML_TOKEN = "AMLToken"
class ApplicationEndpointType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
JUPYTER = "Jupyter"
JUPYTER_LAB = "JupyterLab"
SSH = "SSH"
TENSOR_BOARD = "TensorBoard"
VS_CODE = "VSCode"
THEIA = "Theia"
GRAFANA = "Grafana"
CUSTOM = "Custom"
RAY_DASHBOARD = "RayDashboard"
class ArgumentValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
LITERAL = "Literal"
PARAMETER = "Parameter"
INPUT = "Input"
OUTPUT = "Output"
NESTED_LIST = "NestedList"
STRING_INTERPOLATION_LIST = "StringInterpolationList"
class AssetScopeTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
WORKSPACE = "Workspace"
GLOBAL_ENUM = "Global"
ALL = "All"
FEED = "Feed"
class AssetSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNKNOWN = "Unknown"
LOCAL = "Local"
GITHUB_FILE = "GithubFile"
GITHUB_FOLDER = "GithubFolder"
DEVOPS_ARTIFACTS_ZIP = "DevopsArtifactsZip"
class AssetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
COMPONENT = "Component"
MODEL = "Model"
ENVIRONMENT = "Environment"
DATASET = "Dataset"
DATA_STORE = "DataStore"
SAMPLE_GRAPH = "SampleGraph"
FLOW_TOOL = "FlowTool"
FLOW_TOOL_SETTING = "FlowToolSetting"
FLOW_CONNECTION = "FlowConnection"
FLOW_SAMPLE = "FlowSample"
FLOW_RUNTIME_SPEC = "FlowRuntimeSpec"
class AutoDeleteCondition(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CREATED_GREATER_THAN = "CreatedGreaterThan"
LAST_ACCESSED_GREATER_THAN = "LastAccessedGreaterThan"
class BuildContextLocationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
GIT = "Git"
STORAGE_ACCOUNT = "StorageAccount"
class Communicator(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
PARAMETER_SERVER = "ParameterServer"
GLOO = "Gloo"
MPI = "Mpi"
NCCL = "Nccl"
PARALLEL_TASK = "ParallelTask"
class ComponentRegistrationTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NORMAL = "Normal"
ANONYMOUS_AML_MODULE = "AnonymousAmlModule"
ANONYMOUS_AML_MODULE_VERSION = "AnonymousAmlModuleVersion"
MODULE_ENTITY_ONLY = "ModuleEntityOnly"
class ComponentType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNKNOWN = "Unknown"
COMMAND_COMPONENT = "CommandComponent"
COMMAND = "Command"
class ComputeEnvironmentType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ACI = "ACI"
AKS = "AKS"
AMLCOMPUTE = "AMLCOMPUTE"
IOT = "IOT"
AKSENDPOINT = "AKSENDPOINT"
MIRSINGLEMODEL = "MIRSINGLEMODEL"
MIRAMLCOMPUTE = "MIRAMLCOMPUTE"
MIRGA = "MIRGA"
AMLARC = "AMLARC"
BATCHAMLCOMPUTE = "BATCHAMLCOMPUTE"
UNKNOWN = "UNKNOWN"
class ComputeTargetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
LOCAL = "Local"
REMOTE = "Remote"
HDI_CLUSTER = "HdiCluster"
CONTAINER_INSTANCE = "ContainerInstance"
AML_COMPUTE = "AmlCompute"
COMPUTE_INSTANCE = "ComputeInstance"
CMK8_S = "Cmk8s"
SYNAPSE_SPARK = "SynapseSpark"
KUBERNETES = "Kubernetes"
AISC = "Aisc"
GLOBAL_JOB_DISPATCHER = "GlobalJobDispatcher"
DATABRICKS = "Databricks"
MOCKED_COMPUTE = "MockedCompute"
class ComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
BATCH_AI = "BatchAi"
MLC = "MLC"
HDI_CLUSTER = "HdiCluster"
REMOTE_DOCKER = "RemoteDocker"
DATABRICKS = "Databricks"
AISC = "Aisc"
class ConfigValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
STRING = "String"
SECRET = "Secret"
class ConnectionCategory(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PYTHON_FEED = "PythonFeed"
ACR = "ACR"
GIT = "Git"
S3 = "S3"
SNOWFLAKE = "Snowflake"
AZURE_SQL_DB = "AzureSqlDb"
AZURE_SYNAPSE_ANALYTICS = "AzureSynapseAnalytics"
AZURE_MY_SQL_DB = "AzureMySqlDb"
AZURE_POSTGRES_DB = "AzurePostgresDb"
AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2"
REDIS = "Redis"
API_KEY = "ApiKey"
AZURE_OPEN_AI = "AzureOpenAI"
COGNITIVE_SEARCH = "CognitiveSearch"
COGNITIVE_SERVICE = "CognitiveService"
CUSTOM_KEYS = "CustomKeys"
AZURE_BLOB = "AzureBlob"
AZURE_ONE_LAKE = "AzureOneLake"
COSMOS_DB = "CosmosDb"
COSMOS_DB_MONGO_DB_API = "CosmosDbMongoDbApi"
AZURE_DATA_EXPLORER = "AzureDataExplorer"
AZURE_MARIA_DB = "AzureMariaDb"
AZURE_DATABRICKS_DELTA_LAKE = "AzureDatabricksDeltaLake"
AZURE_SQL_MI = "AzureSqlMi"
AZURE_TABLE_STORAGE = "AzureTableStorage"
AMAZON_RDS_FOR_ORACLE = "AmazonRdsForOracle"
AMAZON_RDS_FOR_SQL_SERVER = "AmazonRdsForSqlServer"
AMAZON_REDSHIFT = "AmazonRedshift"
DB2 = "Db2"
DRILL = "Drill"
GOOGLE_BIG_QUERY = "GoogleBigQuery"
GREENPLUM = "Greenplum"
HBASE = "Hbase"
HIVE = "Hive"
IMPALA = "Impala"
INFORMIX = "Informix"
MARIA_DB = "MariaDb"
MICROSOFT_ACCESS = "MicrosoftAccess"
MY_SQL = "MySql"
NETEZZA = "Netezza"
ORACLE = "Oracle"
PHOENIX = "Phoenix"
POSTGRE_SQL = "PostgreSql"
PRESTO = "Presto"
SAP_OPEN_HUB = "SapOpenHub"
SAP_BW = "SapBw"
SAP_HANA = "SapHana"
SAP_TABLE = "SapTable"
SPARK = "Spark"
SQL_SERVER = "SqlServer"
SYBASE = "Sybase"
TERADATA = "Teradata"
VERTICA = "Vertica"
CASSANDRA = "Cassandra"
COUCHBASE = "Couchbase"
MONGO_DB_V2 = "MongoDbV2"
MONGO_DB_ATLAS = "MongoDbAtlas"
AMAZON_S3_COMPATIBLE = "AmazonS3Compatible"
FILE_SERVER = "FileServer"
FTP_SERVER = "FtpServer"
GOOGLE_CLOUD_STORAGE = "GoogleCloudStorage"
HDFS = "Hdfs"
ORACLE_CLOUD_STORAGE = "OracleCloudStorage"
SFTP = "Sftp"
GENERIC_HTTP = "GenericHttp"
O_DATA_REST = "ODataRest"
ODBC = "Odbc"
GENERIC_REST = "GenericRest"
AMAZON_MWS = "AmazonMws"
CONCUR = "Concur"
DYNAMICS = "Dynamics"
DYNAMICS_AX = "DynamicsAx"
DYNAMICS_CRM = "DynamicsCrm"
GOOGLE_AD_WORDS = "GoogleAdWords"
HUBSPOT = "Hubspot"
JIRA = "Jira"
MAGENTO = "Magento"
MARKETO = "Marketo"
OFFICE365 = "Office365"
ELOQUA = "Eloqua"
RESPONSYS = "Responsys"
ORACLE_SERVICE_CLOUD = "OracleServiceCloud"
PAY_PAL = "PayPal"
QUICK_BOOKS = "QuickBooks"
SALESFORCE = "Salesforce"
SALESFORCE_SERVICE_CLOUD = "SalesforceServiceCloud"
SALESFORCE_MARKETING_CLOUD = "SalesforceMarketingCloud"
SAP_CLOUD_FOR_CUSTOMER = "SapCloudForCustomer"
SAP_ECC = "SapEcc"
SERVICE_NOW = "ServiceNow"
SHARE_POINT_ONLINE_LIST = "SharePointOnlineList"
SHOPIFY = "Shopify"
SQUARE = "Square"
WEB_TABLE = "WebTable"
XERO = "Xero"
ZOHO = "Zoho"
GENERIC_CONTAINER_REGISTRY = "GenericContainerRegistry"
class ConnectionScope(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
USER = "User"
WORKSPACE_SHARED = "WorkspaceShared"
class ConnectionSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NODE = "Node"
NODE_INPUT = "NodeInput"
class ConnectionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
OPEN_AI = "OpenAI"
AZURE_OPEN_AI = "AzureOpenAI"
SERP = "Serp"
BING = "Bing"
AZURE_CONTENT_MODERATOR = "AzureContentModerator"
CUSTOM = "Custom"
AZURE_CONTENT_SAFETY = "AzureContentSafety"
COGNITIVE_SEARCH = "CognitiveSearch"
SUBSTRATE_LLM = "SubstrateLLM"
PINECONE = "Pinecone"
QDRANT = "Qdrant"
WEAVIATE = "Weaviate"
FORM_RECOGNIZER = "FormRecognizer"
class ConsumeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
REFERENCE = "Reference"
COPY = "Copy"
COPY_AND_AUTO_UPGRADE = "CopyAndAutoUpgrade"
class ControlFlowType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
DO_WHILE = "DoWhile"
PARALLEL_FOR = "ParallelFor"
class ControlInputValue(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
FALSE = "False"
TRUE = "True"
SKIPPED = "Skipped"
class DataBindingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MOUNT = "Mount"
DOWNLOAD = "Download"
UPLOAD = "Upload"
READ_ONLY_MOUNT = "ReadOnlyMount"
READ_WRITE_MOUNT = "ReadWriteMount"
DIRECT = "Direct"
EVAL_MOUNT = "EvalMount"
EVAL_DOWNLOAD = "EvalDownload"
class DataCategory(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ALL = "All"
DATASET = "Dataset"
MODEL = "Model"
class DataCopyMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MERGE_WITH_OVERWRITE = "MergeWithOverwrite"
FAIL_IF_CONFLICT = "FailIfConflict"
class DataLocationStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
AZURE_BLOB = "AzureBlob"
ARTIFACT = "Artifact"
SNAPSHOT = "Snapshot"
SAVED_AML_DATASET = "SavedAmlDataset"
ASSET = "Asset"
class DataPortType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INPUT = "Input"
OUTPUT = "Output"
class DataReferenceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
AZURE_BLOB = "AzureBlob"
AZURE_DATA_LAKE = "AzureDataLake"
AZURE_FILES = "AzureFiles"
AZURE_SQL_DATABASE = "AzureSqlDatabase"
AZURE_POSTGRES_DATABASE = "AzurePostgresDatabase"
AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2"
DBFS = "DBFS"
AZURE_MY_SQL_DATABASE = "AzureMySqlDatabase"
CUSTOM = "Custom"
HDFS = "Hdfs"
class DatasetAccessModes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT = "Default"
DATASET_IN_DPV2 = "DatasetInDpv2"
ASSET_IN_DPV2 = "AssetInDpv2"
DATASET_IN_DESIGNER_UI = "DatasetInDesignerUI"
DATASET_IN_DPV2_WITH_DATASET_IN_DESIGNER_UI = "DatasetInDpv2WithDatasetInDesignerUI"
DATASET = "Dataset"
ASSET_IN_DPV2_WITH_DATASET_IN_DESIGNER_UI = "AssetInDpv2WithDatasetInDesignerUI"
DATASET_AND_ASSET_IN_DPV2_WITH_DATASET_IN_DESIGNER_UI = "DatasetAndAssetInDpv2WithDatasetInDesignerUI"
ASSET_IN_DESIGNER_UI = "AssetInDesignerUI"
ASSET_IN_DPV2_WITH_ASSET_IN_DESIGNER_UI = "AssetInDpv2WithAssetInDesignerUI"
ASSET = "Asset"
class DatasetConsumptionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
RUN_INPUT = "RunInput"
REFERENCE = "Reference"
class DatasetDeliveryMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DIRECT = "Direct"
MOUNT = "Mount"
DOWNLOAD = "Download"
HDFS = "Hdfs"
class DatasetOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
RUN_OUTPUT = "RunOutput"
REFERENCE = "Reference"
class DatasetType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
FILE = "File"
TABULAR = "Tabular"
class DataSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
PIPELINE_DATA_SOURCE = "PipelineDataSource"
AML_DATASET = "AmlDataset"
GLOBAL_DATASET = "GlobalDataset"
FEED_MODEL = "FeedModel"
FEED_DATASET = "FeedDataset"
AML_DATA_VERSION = "AmlDataVersion"
AML_MODEL_VERSION = "AMLModelVersion"
class DataStoreMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MOUNT = "Mount"
DOWNLOAD = "Download"
UPLOAD = "Upload"
class DataTransferStorageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DATA_BASE = "DataBase"
FILE_SYSTEM = "FileSystem"
class DataTransferTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
IMPORT_DATA = "ImportData"
EXPORT_DATA = "ExportData"
COPY_DATA = "CopyData"
class DataTypeMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ERROR_WHEN_NOT_EXISTING = "ErrorWhenNotExisting"
REGISTER_WHEN_NOT_EXISTING = "RegisterWhenNotExisting"
REGISTER_BUILDIN_DATA_TYPE_ONLY = "RegisterBuildinDataTypeOnly"
class DeliveryMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DIRECT = "Direct"
MOUNT = "Mount"
DOWNLOAD = "Download"
HDFS = "Hdfs"
class DistributionParameterEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
TEXT = "Text"
NUMBER = "Number"
class DistributionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PY_TORCH = "PyTorch"
TENSOR_FLOW = "TensorFlow"
MPI = "Mpi"
RAY = "Ray"
class EarlyTerminationPolicyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
BANDIT = "Bandit"
MEDIAN_STOPPING = "MedianStopping"
TRUNCATION_SELECTION = "TruncationSelection"
class EmailNotificationEnableType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
JOB_COMPLETED = "JobCompleted"
JOB_FAILED = "JobFailed"
JOB_CANCELLED = "JobCancelled"
class EndpointAuthMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AML_TOKEN = "AMLToken"
KEY = "Key"
AAD_TOKEN = "AADToken"
class EntityKind(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INVALID = "Invalid"
LINEAGE_ROOT = "LineageRoot"
VERSIONED = "Versioned"
UNVERSIONED = "Unversioned"
class EntityStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ACTIVE = "Active"
DEPRECATED = "Deprecated"
DISABLED = "Disabled"
class ErrorHandlingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT_INTERPOLATION = "DefaultInterpolation"
CUSTOMER_FACING_INTERPOLATION = "CustomerFacingInterpolation"
class ExecutionPhase(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
EXECUTION = "Execution"
INITIALIZATION = "Initialization"
FINALIZATION = "Finalization"
class FeaturizationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
OFF = "Off"
class FlowFeatureStateEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
READY = "Ready"
E2_E_TEST = "E2ETest"
class FlowLanguage(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PYTHON = "Python"
C_SHARP = "CSharp"
class FlowPatchOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ARCHIVE_FLOW = "ArchiveFlow"
RESTORE_FLOW = "RestoreFlow"
EXPORT_FLOW_TO_FILE = "ExportFlowToFile"
class FlowRunMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
FLOW = "Flow"
SINGLE_NODE = "SingleNode"
FROM_NODE = "FromNode"
BULK_TEST = "BulkTest"
EVAL = "Eval"
PAIRWISE_EVAL = "PairwiseEval"
class FlowRuntimeSubmissionApiVersion(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
VERSION1 = "Version1"
VERSION2 = "Version2"
class FlowRunTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
FLOW_RUN = "FlowRun"
EVALUATION_RUN = "EvaluationRun"
PAIRWISE_EVALUATION_RUN = "PairwiseEvaluationRun"
SINGLE_NODE_RUN = "SingleNodeRun"
FROM_NODE_RUN = "FromNodeRun"
class FlowTestMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SYNC = "Sync"
ASYNC_ENUM = "Async"
class FlowType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT = "Default"
EVALUATION = "Evaluation"
CHAT = "Chat"
RAG = "Rag"
class ForecastHorizonMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class Framework(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PYTHON = "Python"
PY_SPARK = "PySpark"
CNTK = "Cntk"
TENSOR_FLOW = "TensorFlow"
PY_TORCH = "PyTorch"
PY_SPARK_INTERACTIVE = "PySparkInteractive"
R = "R"
class Frequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MONTH = "Month"
WEEK = "Week"
DAY = "Day"
HOUR = "Hour"
MINUTE = "Minute"
class GlobalJobDispatcherSupportedComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AML_COMPUTE = "AmlCompute"
AML_K8_S = "AmlK8s"
class GraphComponentsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NORMAL = "Normal"
ALL_DESIGNER_BUILDIN = "AllDesignerBuildin"
CONTAINS_DESIGNER_BUILDIN = "ContainsDesignerBuildin"
class GraphDatasetsLoadModes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SKIP_DATASETS_LOAD = "SkipDatasetsLoad"
V1_REGISTERED_DATASET = "V1RegisteredDataset"
V1_SAVED_DATASET = "V1SavedDataset"
PERSIST_DATASETS_INFO = "PersistDatasetsInfo"
SUBMISSION_NEEDED_UPSTREAM_DATASET_ONLY = "SubmissionNeededUpstreamDatasetOnly"
SUBMISSION_NEEDED_IN_COMPLETE_DATASET_ONLY = "SubmissionNeededInCompleteDatasetOnly"
V2_ASSET = "V2Asset"
SUBMISSION = "Submission"
ALL_REGISTERED_DATA = "AllRegisteredData"
ALL_DATA = "AllData"
class GraphSdkCodeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PYTHON = "Python"
JUPYTER_NOTEBOOK = "JupyterNotebook"
UNKNOWN = "Unknown"
class HttpStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CONTINUE_ENUM = "Continue"
SWITCHING_PROTOCOLS = "SwitchingProtocols"
PROCESSING = "Processing"
EARLY_HINTS = "EarlyHints"
OK = "OK"
CREATED = "Created"
ACCEPTED = "Accepted"
NON_AUTHORITATIVE_INFORMATION = "NonAuthoritativeInformation"
NO_CONTENT = "NoContent"
RESET_CONTENT = "ResetContent"
PARTIAL_CONTENT = "PartialContent"
MULTI_STATUS = "MultiStatus"
ALREADY_REPORTED = "AlreadyReported"
IM_USED = "IMUsed"
MULTIPLE_CHOICES = "MultipleChoices"
AMBIGUOUS = "Ambiguous"
MOVED_PERMANENTLY = "MovedPermanently"
MOVED = "Moved"
FOUND = "Found"
REDIRECT = "Redirect"
SEE_OTHER = "SeeOther"
REDIRECT_METHOD = "RedirectMethod"
NOT_MODIFIED = "NotModified"
USE_PROXY = "UseProxy"
UNUSED = "Unused"
TEMPORARY_REDIRECT = "TemporaryRedirect"
REDIRECT_KEEP_VERB = "RedirectKeepVerb"
PERMANENT_REDIRECT = "PermanentRedirect"
BAD_REQUEST = "BadRequest"
UNAUTHORIZED = "Unauthorized"
PAYMENT_REQUIRED = "PaymentRequired"
FORBIDDEN = "Forbidden"
NOT_FOUND = "NotFound"
METHOD_NOT_ALLOWED = "MethodNotAllowed"
NOT_ACCEPTABLE = "NotAcceptable"
PROXY_AUTHENTICATION_REQUIRED = "ProxyAuthenticationRequired"
REQUEST_TIMEOUT = "RequestTimeout"
CONFLICT = "Conflict"
GONE = "Gone"
LENGTH_REQUIRED = "LengthRequired"
PRECONDITION_FAILED = "PreconditionFailed"
REQUEST_ENTITY_TOO_LARGE = "RequestEntityTooLarge"
REQUEST_URI_TOO_LONG = "RequestUriTooLong"
UNSUPPORTED_MEDIA_TYPE = "UnsupportedMediaType"
REQUESTED_RANGE_NOT_SATISFIABLE = "RequestedRangeNotSatisfiable"
EXPECTATION_FAILED = "ExpectationFailed"
MISDIRECTED_REQUEST = "MisdirectedRequest"
UNPROCESSABLE_ENTITY = "UnprocessableEntity"
LOCKED = "Locked"
FAILED_DEPENDENCY = "FailedDependency"
UPGRADE_REQUIRED = "UpgradeRequired"
PRECONDITION_REQUIRED = "PreconditionRequired"
TOO_MANY_REQUESTS = "TooManyRequests"
REQUEST_HEADER_FIELDS_TOO_LARGE = "RequestHeaderFieldsTooLarge"
UNAVAILABLE_FOR_LEGAL_REASONS = "UnavailableForLegalReasons"
INTERNAL_SERVER_ERROR = "InternalServerError"
NOT_IMPLEMENTED = "NotImplemented"
BAD_GATEWAY = "BadGateway"
SERVICE_UNAVAILABLE = "ServiceUnavailable"
GATEWAY_TIMEOUT = "GatewayTimeout"
HTTP_VERSION_NOT_SUPPORTED = "HttpVersionNotSupported"
VARIANT_ALSO_NEGOTIATES = "VariantAlsoNegotiates"
INSUFFICIENT_STORAGE = "InsufficientStorage"
LOOP_DETECTED = "LoopDetected"
NOT_EXTENDED = "NotExtended"
NETWORK_AUTHENTICATION_REQUIRED = "NetworkAuthenticationRequired"
class IdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MANAGED = "Managed"
SERVICE_PRINCIPAL = "ServicePrincipal"
AML_TOKEN = "AMLToken"
class InputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT = "default"
UIONLY_HIDDEN = "uionly_hidden"
class IntellectualPropertyAccessMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
READ_ONLY = "ReadOnly"
READ_WRITE = "ReadWrite"
class JobInputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DATASET = "Dataset"
URI = "Uri"
LITERAL = "Literal"
URI_FILE = "UriFile"
URI_FOLDER = "UriFolder"
ML_TABLE = "MLTable"
CUSTOM_MODEL = "CustomModel"
ML_FLOW_MODEL = "MLFlowModel"
TRITON_MODEL = "TritonModel"
class JobLimitsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
COMMAND = "Command"
SWEEP = "Sweep"
class JobOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
URI = "Uri"
DATASET = "Dataset"
URI_FILE = "UriFile"
URI_FOLDER = "UriFolder"
ML_TABLE = "MLTable"
CUSTOM_MODEL = "CustomModel"
ML_FLOW_MODEL = "MLFlowModel"
TRITON_MODEL = "TritonModel"
class JobProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELED = "Canceled"
IN_PROGRESS = "InProgress"
class JobStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_STARTED = "NotStarted"
STARTING = "Starting"
PROVISIONING = "Provisioning"
PREPARING = "Preparing"
QUEUED = "Queued"
RUNNING = "Running"
FINALIZING = "Finalizing"
CANCEL_REQUESTED = "CancelRequested"
COMPLETED = "Completed"
FAILED = "Failed"
CANCELED = "Canceled"
NOT_RESPONDING = "NotResponding"
PAUSED = "Paused"
UNKNOWN = "Unknown"
SCHEDULED = "Scheduled"
class JobType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
COMMAND = "Command"
SWEEP = "Sweep"
LABELING = "Labeling"
PIPELINE = "Pipeline"
DATA = "Data"
AUTO_ML = "AutoML"
SPARK = "Spark"
BASE = "Base"
class KeyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PRIMARY = "Primary"
SECONDARY = "Secondary"
class ListViewType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ACTIVE_ONLY = "ActiveOnly"
ARCHIVED_ONLY = "ArchivedOnly"
ALL = "All"
class LogLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
TRACE = "Trace"
DEBUG = "Debug"
INFORMATION = "Information"
WARNING = "Warning"
ERROR = "Error"
CRITICAL = "Critical"
NONE = "None"
class LogVerbosity(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_SET = "NotSet"
DEBUG = "Debug"
INFO = "Info"
WARNING = "Warning"
ERROR = "Error"
CRITICAL = "Critical"
class LongRunningUpdateType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ENABLE_MODULE = "EnableModule"
DISABLE_MODULE = "DisableModule"
UPDATE_DISPLAY_NAME = "UpdateDisplayName"
UPDATE_DESCRIPTION = "UpdateDescription"
UPDATE_TAGS = "UpdateTags"
class ManagedServiceIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SYSTEM_ASSIGNED = "SystemAssigned"
USER_ASSIGNED = "UserAssigned"
SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssignedUserAssigned"
NONE = "None"
class MetricValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INT = "Int"
DOUBLE = "Double"
STRING = "String"
BOOL = "Bool"
ARTIFACT = "Artifact"
HISTOGRAM = "Histogram"
MALFORMED = "Malformed"
class MfeInternalIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MANAGED = "Managed"
AML_TOKEN = "AMLToken"
USER_IDENTITY = "UserIdentity"
class MfeInternalMLFlowAutologgerState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ENABLED = "Enabled"
DISABLED = "Disabled"
class MfeInternalScheduleStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ENABLED = "Enabled"
DISABLED = "Disabled"
class MLFlowAutologgerState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ENABLED = "Enabled"
DISABLED = "Disabled"
class ModuleDtoFields(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFINITION = "Definition"
YAML_STR = "YamlStr"
REGISTRATION_CONTEXT = "RegistrationContext"
RUN_SETTING_PARAMETERS = "RunSettingParameters"
RUN_DEFINITION = "RunDefinition"
ALL = "All"
DEFAULT = "Default"
BASIC = "Basic"
MINIMAL = "Minimal"
class ModuleInfoFromYamlStatusEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NEW_MODULE = "NewModule"
NEW_VERSION = "NewVersion"
CONFLICT = "Conflict"
PARSE_ERROR = "ParseError"
PROCESS_REQUEST_ERROR = "ProcessRequestError"
class ModuleRunSettingTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ALL = "All"
RELEASED = "Released"
DEFAULT = "Default"
TESTING = "Testing"
LEGACY = "Legacy"
PREVIEW = "Preview"
UX_FULL = "UxFull"
INTEGRATION = "Integration"
UX_INTEGRATION = "UxIntegration"
FULL = "Full"
class ModuleScope(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ALL = "All"
GLOBAL_ENUM = "Global"
WORKSPACE = "Workspace"
ANONYMOUS = "Anonymous"
STEP = "Step"
DRAFT = "Draft"
FEED = "Feed"
REGISTRY = "Registry"
SYSTEM_AUTO_CREATED = "SystemAutoCreated"
class ModuleSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNKNOWN = "Unknown"
LOCAL = "Local"
GITHUB_FILE = "GithubFile"
GITHUB_FOLDER = "GithubFolder"
DEVOPS_ARTIFACTS_ZIP = "DevopsArtifactsZip"
SERIALIZED_MODULE_INFO = "SerializedModuleInfo"
class ModuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
BATCH_INFERENCING = "BatchInferencing"
class ModuleUpdateOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SET_DEFAULT_VERSION = "SetDefaultVersion"
ENABLE_MODULE = "EnableModule"
DISABLE_MODULE = "DisableModule"
UPDATE_DISPLAY_NAME = "UpdateDisplayName"
UPDATE_DESCRIPTION = "UpdateDescription"
UPDATE_TAGS = "UpdateTags"
class ModuleWorkingMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NORMAL = "Normal"
OUTPUT_TO_DATASET = "OutputToDataset"
class NCrossValidationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class NodeCompositionMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
ONLY_SEQUENTIAL = "OnlySequential"
FULL = "Full"
class NodesValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ALL = "All"
CUSTOM = "Custom"
class Orientation(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
HORIZONTAL = "Horizontal"
VERTICAL = "Vertical"
class OutputMechanism(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UPLOAD = "Upload"
MOUNT = "Mount"
HDFS = "Hdfs"
LINK = "Link"
DIRECT = "Direct"
class ParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INT = "Int"
DOUBLE = "Double"
BOOL = "Bool"
STRING = "String"
UNDEFINED = "Undefined"
class ParameterValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
LITERAL = "Literal"
GRAPH_PARAMETER_NAME = "GraphParameterName"
CONCATENATE = "Concatenate"
INPUT = "Input"
DATA_PATH = "DataPath"
DATA_SET_DEFINITION = "DataSetDefinition"
class PipelineDraftMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
NORMAL = "Normal"
CUSTOM = "Custom"
class PipelineRunStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_STARTED = "NotStarted"
RUNNING = "Running"
FAILED = "Failed"
FINISHED = "Finished"
CANCELED = "Canceled"
QUEUED = "Queued"
CANCEL_REQUESTED = "CancelRequested"
class PipelineStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_STARTED = "NotStarted"
IN_DRAFT = "InDraft"
PREPARING = "Preparing"
RUNNING = "Running"
FAILED = "Failed"
FINISHED = "Finished"
CANCELED = "Canceled"
THROTTLED = "Throttled"
UNKNOWN = "Unknown"
class PipelineType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
TRAINING_PIPELINE = "TrainingPipeline"
REAL_TIME_INFERENCE_PIPELINE = "RealTimeInferencePipeline"
BATCH_INFERENCE_PIPELINE = "BatchInferencePipeline"
UNKNOWN = "Unknown"
class PortAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PROMOTE = "Promote"
VIEW_IN_DATA_STORE = "ViewInDataStore"
VISUALIZE = "Visualize"
GET_SCHEMA = "GetSchema"
CREATE_INFERENCE_GRAPH = "CreateInferenceGraph"
REGISTER_MODEL = "RegisterModel"
PROMOTE_AS_TABULAR = "PromoteAsTabular"
class PrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUC_WEIGHTED = "AUCWeighted"
ACCURACY = "Accuracy"
NORM_MACRO_RECALL = "NormMacroRecall"
AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted"
PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted"
SPEARMAN_CORRELATION = "SpearmanCorrelation"
NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError"
R2_SCORE = "R2Score"
NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError"
NORMALIZED_ROOT_MEAN_SQUARED_LOG_ERROR = "NormalizedRootMeanSquaredLogError"
MEAN_AVERAGE_PRECISION = "MeanAveragePrecision"
IOU = "Iou"
class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNKNOWN = "Unknown"
UPDATING = "Updating"
CREATING = "Creating"
DELETING = "Deleting"
ACCEPTED = "Accepted"
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELED = "Canceled"
class RealTimeEndpointInternalStepCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ABOUT_TO_DEPLOY = "AboutToDeploy"
WAIT_AKS_COMPUTE_READY = "WaitAksComputeReady"
REGISTER_MODELS = "RegisterModels"
CREATE_SERVICE_FROM_MODELS = "CreateServiceFromModels"
UPDATE_SERVICE_FROM_MODELS = "UpdateServiceFromModels"
WAIT_SERVICE_CREATING = "WaitServiceCreating"
FETCH_SERVICE_RELATED_INFO = "FetchServiceRelatedInfo"
TEST_WITH_SAMPLE_DATA = "TestWithSampleData"
ABOUT_TO_DELETE = "AboutToDelete"
DELETE_DEPLOYMENT = "DeleteDeployment"
DELETE_ASSET = "DeleteAsset"
DELETE_IMAGE = "DeleteImage"
DELETE_MODEL = "DeleteModel"
DELETE_SERVICE_RECORD = "DeleteServiceRecord"
class RealTimeEndpointOpCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CREATE = "Create"
UPDATE = "Update"
DELETE = "Delete"
class RealTimeEndpointOpStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ONGOING = "Ongoing"
SUCCEEDED = "Succeeded"
FAILED = "Failed"
SUCCEEDED_WITH_WARNING = "SucceededWithWarning"
class RecurrenceFrequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MINUTE = "Minute"
HOUR = "Hour"
DAY = "Day"
WEEK = "Week"
MONTH = "Month"
class RunDisplayNameGenerationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO_APPEND = "AutoAppend"
USER_PROVIDED_MACRO = "UserProvidedMacro"
class RunSettingParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNDEFINED = "Undefined"
INT = "Int"
DOUBLE = "Double"
BOOL = "Bool"
STRING = "String"
JSON_STRING = "JsonString"
YAML_STRING = "YamlString"
STRING_LIST = "StringList"
class RunSettingUIWidgetTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT = "Default"
COMPUTE_SELECTION = "ComputeSelection"
JSON_EDITOR = "JsonEditor"
MODE = "Mode"
SEARCH_SPACE_PARAMETER = "SearchSpaceParameter"
SECTION_TOGGLE = "SectionToggle"
YAML_EDITOR = "YamlEditor"
ENABLE_RUNTIME_SWEEP = "EnableRuntimeSweep"
DATA_STORE_SELECTION = "DataStoreSelection"
CHECKBOX = "Checkbox"
MULTIPLE_SELECTION = "MultipleSelection"
HYPERPARAMETER_CONFIGURATION = "HyperparameterConfiguration"
JSON_TEXT_BOX = "JsonTextBox"
CONNECTION = "Connection"
STATIC = "Static"
class RunStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_STARTED = "NotStarted"
UNAPPROVED = "Unapproved"
PAUSING = "Pausing"
PAUSED = "Paused"
STARTING = "Starting"
PREPARING = "Preparing"
QUEUED = "Queued"
RUNNING = "Running"
FINALIZING = "Finalizing"
CANCEL_REQUESTED = "CancelRequested"
COMPLETED = "Completed"
FAILED = "Failed"
CANCELED = "Canceled"
class RuntimeStatusEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UNAVAILABLE = "Unavailable"
FAILED = "Failed"
NOT_EXIST = "NotExist"
STARTING = "Starting"
STOPPING = "Stopping"
class RuntimeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MANAGED_ONLINE_ENDPOINT = "ManagedOnlineEndpoint"
COMPUTE_INSTANCE = "ComputeInstance"
TRAINING_SESSION = "TrainingSession"
class RunType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
HTTP = "HTTP"
SDK = "SDK"
SCHEDULE = "Schedule"
PORTAL = "Portal"
class SamplingAlgorithmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
RANDOM = "Random"
GRID = "Grid"
BAYESIAN = "Bayesian"
class ScheduleProvisioningStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CREATING = "Creating"
UPDATING = "Updating"
DELETING = "Deleting"
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELED = "Canceled"
class ScheduleStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ENABLED = "Enabled"
DISABLED = "Disabled"
class ScheduleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CRON = "Cron"
RECURRENCE = "Recurrence"
class ScopeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
GLOBAL_ENUM = "Global"
TENANT = "Tenant"
SUBSCRIPTION = "Subscription"
RESOURCE_GROUP = "ResourceGroup"
WORKSPACE = "Workspace"
class ScriptType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
PYTHON = "Python"
NOTEBOOK = "Notebook"
class SeasonalityMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class Section(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
GALLERY = "Gallery"
TEMPLATE = "Template"
class SessionSetupModeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CLIENT_WAIT = "ClientWait"
SYSTEM_WAIT = "SystemWait"
class SetupFlowSessionAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INSTALL = "Install"
RESET = "Reset"
UPDATE = "Update"
DELETE = "Delete"
class SeverityLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CRITICAL = "Critical"
ERROR = "Error"
WARNING = "Warning"
INFO = "Info"
class ShortSeriesHandlingConfiguration(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
PAD = "Pad"
DROP = "Drop"
class StackMetaLearnerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
LOGISTIC_REGRESSION = "LogisticRegression"
LOGISTIC_REGRESSION_CV = "LogisticRegressionCV"
LIGHT_GBM_CLASSIFIER = "LightGBMClassifier"
ELASTIC_NET = "ElasticNet"
ELASTIC_NET_CV = "ElasticNetCV"
LIGHT_GBM_REGRESSOR = "LightGBMRegressor"
LINEAR_REGRESSION = "LinearRegression"
class StorageAuthType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MSI = "MSI"
CONNECTION_STRING = "ConnectionString"
SAS = "SAS"
class StoredProcedureParameterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
STRING = "String"
INT = "Int"
DECIMAL = "Decimal"
GUID = "Guid"
BOOLEAN = "Boolean"
DATE = "Date"
class SuccessfulCommandReturnCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ZERO = "Zero"
ZERO_OR_GREATER = "ZeroOrGreater"
class TabularTrainingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DISTRIBUTED = "Distributed"
NON_DISTRIBUTED = "NonDistributed"
AUTO = "Auto"
class TargetAggregationFunction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SUM = "Sum"
MAX = "Max"
MIN = "Min"
MEAN = "Mean"
class TargetLagsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class TargetRollingWindowSizeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
AUTO = "Auto"
CUSTOM = "Custom"
class TaskCreationOptions(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
PREFER_FAIRNESS = "PreferFairness"
LONG_RUNNING = "LongRunning"
ATTACHED_TO_PARENT = "AttachedToParent"
DENY_CHILD_ATTACH = "DenyChildAttach"
HIDE_SCHEDULER = "HideScheduler"
RUN_CONTINUATIONS_ASYNCHRONOUSLY = "RunContinuationsAsynchronously"
class TaskStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CREATED = "Created"
WAITING_FOR_ACTIVATION = "WaitingForActivation"
WAITING_TO_RUN = "WaitingToRun"
RUNNING = "Running"
WAITING_FOR_CHILDREN_TO_COMPLETE = "WaitingForChildrenToComplete"
RAN_TO_COMPLETION = "RanToCompletion"
CANCELED = "Canceled"
FAULTED = "Faulted"
class TaskStatusCode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NOT_STARTED = "NotStarted"
QUEUED = "Queued"
RUNNING = "Running"
FAILED = "Failed"
FINISHED = "Finished"
CANCELED = "Canceled"
PARTIALLY_EXECUTED = "PartiallyExecuted"
BYPASSED = "Bypassed"
class TaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CLASSIFICATION = "Classification"
REGRESSION = "Regression"
FORECASTING = "Forecasting"
IMAGE_CLASSIFICATION = "ImageClassification"
IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel"
IMAGE_OBJECT_DETECTION = "ImageObjectDetection"
IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation"
TEXT_CLASSIFICATION = "TextClassification"
TEXT_MULTI_LABELING = "TextMultiLabeling"
TEXT_NER = "TextNER"
TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel"
class ToolFuncCallScenario(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
GENERATED_BY = "generated_by"
REVERSE_GENERATED_BY = "reverse_generated_by"
DYNAMIC_LIST = "dynamic_list"
class ToolState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
STABLE = "Stable"
PREVIEW = "Preview"
DEPRECATED = "Deprecated"
class ToolType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
LLM = "llm"
PYTHON = "python"
ACTION = "action"
PROMPT = "prompt"
CUSTOM_LLM = "custom_llm"
CSHARP = "csharp"
class TrainingOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
METRICS = "Metrics"
MODEL = "Model"
class TriggerOperationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
CREATE = "Create"
UPDATE = "Update"
DELETE = "Delete"
CREATE_OR_UPDATE = "CreateOrUpdate"
class TriggerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
RECURRENCE = "Recurrence"
CRON = "Cron"
class UIInputDataDeliveryMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
READ_ONLY_MOUNT = "Read-only mount"
READ_WRITE_MOUNT = "Read-write mount"
DOWNLOAD = "Download"
DIRECT = "Direct"
EVALUATE_MOUNT = "Evaluate mount"
EVALUATE_DOWNLOAD = "Evaluate download"
HDFS = "Hdfs"
class UIScriptLanguageEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
PYTHON = "Python"
R = "R"
JSON = "Json"
SQL = "Sql"
class UIWidgetTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEFAULT = "Default"
MODE = "Mode"
COLUMN_PICKER = "ColumnPicker"
CREDENTIAL = "Credential"
SCRIPT = "Script"
COMPUTE_SELECTION = "ComputeSelection"
JSON_EDITOR = "JsonEditor"
SEARCH_SPACE_PARAMETER = "SearchSpaceParameter"
SECTION_TOGGLE = "SectionToggle"
YAML_EDITOR = "YamlEditor"
ENABLE_RUNTIME_SWEEP = "EnableRuntimeSweep"
DATA_STORE_SELECTION = "DataStoreSelection"
INSTANCE_TYPE_SELECTION = "InstanceTypeSelection"
CONNECTION_SELECTION = "ConnectionSelection"
PROMPT_FLOW_CONNECTION_SELECTION = "PromptFlowConnectionSelection"
AZURE_OPEN_AI_DEPLOYMENT_NAME_SELECTION = "AzureOpenAIDeploymentNameSelection"
class UploadState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
UPLOADING = "Uploading"
COMPLETED = "Completed"
CANCELED = "Canceled"
FAILED = "Failed"
class UserType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
USER = "User"
APPLICATION = "Application"
MANAGED_IDENTITY = "ManagedIdentity"
KEY = "Key"
class UseStl(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SEASON = "Season"
SEASON_TREND = "SeasonTrend"
class ValidationStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
SUCCEEDED = "Succeeded"
FAILED = "Failed"
class ValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
INT = "int"
DOUBLE = "double"
BOOL = "bool"
STRING = "string"
SECRET = "secret"
PROMPT_TEMPLATE = "prompt_template"
OBJECT = "object"
LIST = "list"
BING_CONNECTION = "BingConnection"
OPEN_AI_CONNECTION = "OpenAIConnection"
AZURE_OPEN_AI_CONNECTION = "AzureOpenAIConnection"
AZURE_CONTENT_MODERATOR_CONNECTION = "AzureContentModeratorConnection"
CUSTOM_CONNECTION = "CustomConnection"
AZURE_CONTENT_SAFETY_CONNECTION = "AzureContentSafetyConnection"
SERP_CONNECTION = "SerpConnection"
COGNITIVE_SEARCH_CONNECTION = "CognitiveSearchConnection"
SUBSTRATE_LLM_CONNECTION = "SubstrateLLMConnection"
PINECONE_CONNECTION = "PineconeConnection"
QDRANT_CONNECTION = "QdrantConnection"
WEAVIATE_CONNECTION = "WeaviateConnection"
FUNCTION_LIST = "function_list"
FUNCTION_STR = "function_str"
FORM_RECOGNIZER_CONNECTION = "FormRecognizerConnection"
FILE_PATH = "file_path"
IMAGE = "image"
class VmPriority(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
DEDICATED = "Dedicated"
LOWPRIORITY = "Lowpriority"
class WebServiceState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
TRANSITIONING = "Transitioning"
HEALTHY = "Healthy"
UNHEALTHY = "Unhealthy"
FAILED = "Failed"
UNSCHEDULABLE = "Unschedulable"
class Weekday(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MONDAY = "Monday"
TUESDAY = "Tuesday"
WEDNESDAY = "Wednesday"
THURSDAY = "Thursday"
FRIDAY = "Friday"
SATURDAY = "Saturday"
SUNDAY = "Sunday"
class WeekDays(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
MONDAY = "Monday"
TUESDAY = "Tuesday"
WEDNESDAY = "Wednesday"
THURSDAY = "Thursday"
FRIDAY = "Friday"
SATURDAY = "Saturday"
SUNDAY = "Sunday"
class YarnDeployMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
NONE = "None"
CLIENT = "Client"
CLUSTER = "Cluster"
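# A hedged usage sketch, assuming azure-core's CaseInsensitiveEnumMeta semantics
# (member-name lookup is case-insensitive, and members compare equal to their
# string values because each enum also subclasses str). Not executed here.
#
#   >>> ConnectionType["azure_open_ai"] is ConnectionType.AZURE_OPEN_AI
#   True
#   >>> ConnectionType.AZURE_OPEN_AI == "AzureOpenAI"
#   True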
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow/azure | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_utils/__init__.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
from .gerneral import is_arm_id
__all__ = ["is_arm_id"]
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow/azure | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_utils/_url_utils.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import re
class BulkRunURL:
"""Parser for a flow run URL."""
REGEX_PATTERN = ".*prompts/flow/([^/]+)/([^/]+)/bulktest/([^/]+).*"
RUN_URL_FORMAT = (
"https://ml.azure.com/prompts/flow/{}/{}/bulktest/{}/details?wsid="
"/subscriptions/{}/resourcegroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}"
)
def __init__(self, url: str):
if url:
match = re.match(self.REGEX_PATTERN, url)
if match:
self.experiment_id = match.group(1)
self.flow_id = match.group(2)
self.bulk_test_id = match.group(3)
else:
raise ValueError("Invalid flow run URL: {}".format(url))
@classmethod
def get_url(cls, experiment_id, flow_id, bulk_test_id, subscription_id, resource_group, workspace_name):
return cls.RUN_URL_FORMAT.format(
experiment_id, flow_id, bulk_test_id, subscription_id, resource_group, workspace_name
)
class BulkRunId:
"""Parser for a flow run ID."""
REGEX_PATTERN = "azureml://experiment/([^/]+)/flow/([^/]+)/bulktest/([^/]+)(/run/[^/]+)?"
RUN_ID_FORMAT = "azureml://experiment/{}/flow/{}/bulktest/{}"
def __init__(self, arm_id: str):
if arm_id:
match = re.match(self.REGEX_PATTERN, arm_id)
if match:
self.experiment_id = match.group(1)
self.flow_id = match.group(2)
self.bulk_test_id = match.group(3)
                # match.groups() always has 4 entries for this pattern (unmatched
                # groups are None), so check whether the optional run segment
                # actually matched.
                if match.group(4):
                    self.run_id = match.group(4).split("/")[-1].strip()
                else:
                    self.run_id = None
else:
raise ValueError("Invalid flow run ID: {}".format(arm_id))
@classmethod
def get_url(cls, experiment_id, flow_id, bulk_test_id, *, run_id=None):
arm_id = cls.RUN_ID_FORMAT.format(experiment_id, flow_id, bulk_test_id)
if run_id:
arm_id += "/run/{}".format(run_id)
return arm_id
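# A hedged usage sketch; the identifiers below are illustrative placeholders,
# not real Azure resources.
#
#   arm_id = BulkRunId.get_url("exp-1", "flow-1", "bulktest-1", run_id="run-1")
#   # -> "azureml://experiment/exp-1/flow/flow-1/bulktest/bulktest-1/run/run-1"
#   parsed = BulkRunId(arm_id)
#   assert (parsed.experiment_id, parsed.run_id) == ("exp-1", "run-1")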
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow/azure | promptflow_repo/promptflow/src/promptflow/promptflow/azure/_utils/gerneral.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import jwt
from promptflow.exceptions import ValidationException
def is_arm_id(obj) -> bool:
return isinstance(obj, str) and obj.startswith("azureml://")
def get_token(credential, resource) -> str:
from azure.ai.ml._azure_environments import _resource_to_scopes
azure_ml_scopes = _resource_to_scopes(resource)
token = credential.get_token(*azure_ml_scopes).token
# validate token has aml audience
decoded_token = jwt.decode(
token,
options={"verify_signature": False, "verify_aud": False},
)
if decoded_token.get("aud") != resource:
msg = """AAD token with aml scope could not be fetched using the credentials being used.
Please validate if token with {0} scope can be fetched using credentials provided to PFClient.
Token with {0} scope can be fetched using credentials.get_token({0})
"""
raise ValidationException(
message=msg.format(*azure_ml_scopes),
)
return token
def get_aml_token(credential) -> str:
from azure.ai.ml._azure_environments import _get_aml_resource_id_from_metadata
resource = _get_aml_resource_id_from_metadata()
return get_token(credential, resource)
def get_arm_token(credential) -> str:
from azure.ai.ml._azure_environments import _get_base_url_from_metadata
resource = _get_base_url_from_metadata()
return get_token(credential, resource)
def get_authorization(credential=None) -> str:
token = get_arm_token(credential=credential)
return "Bearer " + token
def get_user_alias_from_credential(credential):
token = get_arm_token(credential=credential)
decode_json = jwt.decode(token, options={"verify_signature": False, "verify_aud": False})
try:
email = decode_json.get("upn", decode_json.get("email", None))
return email.split("@")[0]
except Exception:
# use oid when failed to get upn, e.g. service principal
return decode_json["oid"]
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/contracts/types.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import dataclass
class Secret(str):
"""This class is used to hint a parameter is a secret to load."""
def set_secret_name(self, name):
"""Set the secret_name attribute for the Secret instance.
:param name: The name of the secret.
:type name: str
"""
self.secret_name = name
class PromptTemplate(str):
"""This class is used to hint a parameter is a prompt template."""
pass
class FilePath(str):
"""This class is used to hint a parameter is a file path."""
pass
@dataclass
class AssistantDefinition:
"""This class is used to define an assistant definition."""
model: str
instructions: str
tools: list
@staticmethod
def deserialize(data: dict) -> "AssistantDefinition":
return AssistantDefinition(
model=data.get("model", ""),
instructions=data.get("instructions", ""),
tools=data.get("tools", [])
)
def serialize(self):
return {
"model": self.model,
"instructions": self.instructions,
"tools": self.tools,
}
def init_tool_invoker(self):
from promptflow.executor._assistant_tool_invoker import AssistantToolInvoker
return AssistantToolInvoker.init(self.tools)
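# A round-trip sketch; the model name and instructions are illustrative.
#
#   definition = AssistantDefinition.deserialize(
#       {"model": "gpt-4", "instructions": "You are a helpful assistant.", "tools": []}
#   )
#   assert definition.serialize() == {
#       "model": "gpt-4",
#       "instructions": "You are a helpful assistant.",
#       "tools": [],
#   }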
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/contracts/trace.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, List, Optional
class TraceType(str, Enum):
"""An enumeration class to represent different types of traces."""
LLM = "LLM"
TOOL = "Tool"
FUNCTION = "Function"
LANGCHAIN = "LangChain"
@dataclass
class Trace:
"""A dataclass that represents a trace of a program execution.
:param name: The name of the trace.
:type name: str
:param type: The type of the trace.
:type type: ~promptflow.contracts.trace.TraceType
:param inputs: The inputs of the trace.
:type inputs: Dict[str, Any]
:param output: The output of the trace, or None if not available.
:type output: Optional[Any]
:param start_time: The timestamp of the start time, or None if not available.
:type start_time: Optional[float]
:param end_time: The timestamp of the end time, or None if not available.
:type end_time: Optional[float]
:param error: The error message of the trace, or None if no error occurred.
:type error: Optional[str]
:param children: The list of child traces, or None if no children.
:type children: Optional[List[Trace]]
:param node_name: The node name of the trace, used for flow level trace, or None if not applicable.
:type node_name: Optional[str]
"""
name: str
type: TraceType
inputs: Dict[str, Any]
output: Optional[Any] = None
start_time: Optional[float] = None # The timestamp of the start time
end_time: Optional[float] = None # The timestamp of the end time
error: Optional[str] = None
children: Optional[List["Trace"]] = None
node_name: Optional[str] = None # The node name of the trace, used for flow level trace
parent_id: str = "" # The parent trace id of the trace
id: str = "" # The trace id
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/contracts/run_mode.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from enum import Enum
class RunMode(str, Enum):
"""An enumeration of possible run modes."""
Test = "Test"
SingleNode = "SingleNode"
Batch = "Batch"
@classmethod
def parse(cls, value: str):
"""Parse a string to a RunMode enum value.
:param value: The string to parse.
:type value: str
:return: The corresponding RunMode enum value.
:rtype: ~promptflow.contracts.run_mode.RunMode
:raises ValueError: If the value is not a valid string.
"""
if not isinstance(value, str):
raise ValueError(f"Invalid value type to parse: {type(value)}")
if value == "SingleNode":
return RunMode.SingleNode
elif value == "Batch":
return RunMode.Batch
else:
return RunMode.Test
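# Example: only the exact strings "SingleNode" and "Batch" map to their own
# modes; any other string falls back to Test.
#
#   assert RunMode.parse("Batch") is RunMode.Batch
#   assert RunMode.parse("test") is RunMode.Test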
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/contracts/__init__.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/contracts/multimedia.py | import base64
import filetype
import hashlib
from typing import Callable, Optional
class PFBytes(bytes):
"""This class is used to represent a bytes object in PromptFlow.
It has all the functionalities of a bytes object,
and also has some additional methods to help with serialization and deserialization.
"""
def __new__(cls, value: bytes, *args, **kwargs):
# Here we must only pass the value to the bytes constructor,
# otherwise we will get a type error that the constructor doesn't take such args.
# See https://docs.python.org/3/reference/datamodel.html#object.__new__
return super().__new__(cls, value)
def __init__(self, value: bytes, mime_type: str, source_url: Optional[str] = None):
# Here the first argument should also be "value", the same as __new__.
        # Otherwise we will get an error when initializing the object.
super().__init__()
# Use this hash to identify this bytes.
self._hash = hashlib.sha1(value).hexdigest()[:8]
self._mime_type = mime_type.lower()
self._source_url = source_url
@property
def source_url(self):
return self._source_url
def to_base64(self, with_type: bool = False, dict_type: bool = False):
"""Returns the base64 representation of the PFBytes."""
if with_type:
if not dict_type:
return f"data:{self._mime_type};base64," + base64.b64encode(self).decode("utf-8")
return {f"data:{self._mime_type};base64": base64.b64encode(self).decode("utf-8")}
return base64.b64encode(self).decode("utf-8")
class Image(PFBytes):
"""This class is used to represent an image in PromptFlow. It is a subclass of
~promptflow.contracts.multimedia.PFBytes.
"""
def __init__(self, value: bytes, mime_type: str = None, source_url: Optional[str] = None):
if mime_type is None:
mime_type = filetype.guess_mime(value)
if mime_type is None or not mime_type.startswith("image/"):
mime_type = "image/*"
        super().__init__(value, mime_type, source_url)
def __str__(self):
return f"Image({self._hash})"
def __repr__(self) -> str:
return f"Image({self._hash})"
def serialize(self, encoder: Callable = None):
"""Serialize the image to a dictionary."""
if encoder is None:
return self.__str__()
return encoder(self)
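# A hedged usage sketch; the byte payload below is an illustrative stub, not a
# valid PNG.
#
#   img = Image(b"\x89PNG\r\n\x1a\nstub", mime_type="image/png")
#   img.to_base64(with_type=True)  # -> "data:image/png;base64,..."
#   str(img)                       # -> "Image(<8-char sha1 prefix>)"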
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/contracts/flow.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import logging
import sys
from dataclasses import asdict, dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional
from promptflow._utils.yaml_utils import load_yaml
from promptflow.contracts._errors import FlowDefinitionError
from promptflow.exceptions import ErrorTarget
from .._constants import LANGUAGE_KEY, FlowLanguage
from .._sdk._constants import DEFAULT_ENCODING
from .._utils.dataclass_serializer import serialize
from .._utils.utils import try_import
from ._errors import FailedToImportModule
from .tool import ConnectionType, Tool, ToolType, ValueType
logger = logging.getLogger(__name__)
class InputValueType(Enum):
"""The enum of input value type."""
LITERAL = "Literal"
FLOW_INPUT = "FlowInput"
NODE_REFERENCE = "NodeReference"
FLOW_INPUT_PREFIX = "flow."
FLOW_INPUT_PREFIXES = [FLOW_INPUT_PREFIX, "inputs."] # Use a list for backward compatibility
@dataclass
class InputAssignment:
"""This class represents the assignment of an input value.
:param value: The value of the input assignment.
:type value: Any
:param value_type: The type of the input assignment.
:type value_type: ~promptflow.contracts.flow.InputValueType
:param section: The section of the input assignment, usually the output.
:type section: str
:param property: The property of the input assignment that exists in the section.
:type property: str
"""
value: Any
value_type: InputValueType = InputValueType.LITERAL
section: str = ""
property: str = ""
def serialize(self):
"""Serialize the input assignment to a string."""
if self.value_type == InputValueType.FLOW_INPUT:
return f"${{{FLOW_INPUT_PREFIX}{self.value}}}"
elif self.value_type == InputValueType.NODE_REFERENCE:
if self.property:
return f"${{{self.value}.{self.section}.{self.property}}}"
return f"${{{self.value}.{self.section}}}"
elif ConnectionType.is_connection_value(self.value):
return ConnectionType.serialize_conn(self.value)
return self.value
@staticmethod
def deserialize(value: str) -> "InputAssignment":
"""Deserialize the input assignment from a string.
:param value: The string to be deserialized.
:type value: str
:return: The input assignment constructed from the string.
:rtype: ~promptflow.contracts.flow.InputAssignment
"""
literal_value = InputAssignment(value, InputValueType.LITERAL)
if isinstance(value, str) and value.startswith("$") and len(value) > 2:
value = value[1:]
if value[0] != "{" or value[-1] != "}":
return literal_value
value = value[1:-1]
return InputAssignment.deserialize_reference(value)
return literal_value
@staticmethod
def deserialize_reference(value: str) -> "InputAssignment":
"""Deserialize the reference(including node/flow reference) part of an input assignment.
:param value: The string to be deserialized.
:type value: str
:return: The input assignment of reference types.
:rtype: ~promptflow.contracts.flow.InputAssignment
"""
if FlowInputAssignment.is_flow_input(value):
return FlowInputAssignment.deserialize(value)
return InputAssignment.deserialize_node_reference(value)
@staticmethod
def deserialize_node_reference(data: str) -> "InputAssignment":
"""Deserialize the node reference part of an input assignment.
:param data: The string to be deserialized.
:type data: str
:return: Input assignment of node reference type.
:rtype: ~promptflow.contracts.flow.InputAssignment
"""
value_type = InputValueType.NODE_REFERENCE
if "." not in data:
return InputAssignment(data, value_type, "output")
node_name, port_name = data.split(".", 1)
if "." not in port_name:
return InputAssignment(node_name, value_type, port_name)
section, property = port_name.split(".", 1)
return InputAssignment(node_name, value_type, section, property)
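# A parsing sketch: "${node1.output.text}" deserializes to a NODE_REFERENCE
# assignment and round-trips through serialize(); the node name is illustrative.
#
#   a = InputAssignment.deserialize("${node1.output.text}")
#   assert (a.value, a.section, a.property) == ("node1", "output", "text")
#   assert a.serialize() == "${node1.output.text}"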
@dataclass
class FlowInputAssignment(InputAssignment):
"""This class represents the assignment of a flow input value.
:param prefix: The prefix of the flow input.
:type prefix: str
"""
prefix: str = FLOW_INPUT_PREFIX
@staticmethod
def is_flow_input(input_value: str) -> bool:
"""Check whether the input value is a flow input.
:param input_value: The input value to be checked.
:type input_value: str
:return: Whether the input value is a flow input.
:rtype: bool
"""
for prefix in FLOW_INPUT_PREFIXES:
if input_value.startswith(prefix):
return True
return False
@staticmethod
def deserialize(value: str) -> "FlowInputAssignment":
"""Deserialize the flow input assignment from a string.
:param value: The string to be deserialized.
:type value: str
:return: The flow input assignment constructed from the string.
:rtype: ~promptflow.contracts.flow.FlowInputAssignment
"""
for prefix in FLOW_INPUT_PREFIXES:
if value.startswith(prefix):
return FlowInputAssignment(
value=value[len(prefix) :], value_type=InputValueType.FLOW_INPUT, prefix=prefix
)
raise ValueError(f"Unexpected flow input value {value}")
class ToolSourceType(str, Enum):
"""The enum of tool source type."""
Code = "code"
Package = "package"
PackageWithPrompt = "package_with_prompt"
@dataclass
class ToolSource:
"""This class represents the source of a tool.
:param type: The type of the tool source.
:type type: ~promptflow.contracts.flow.ToolSourceType
:param tool: The tool of the tool source.
:type tool: str
:param path: The path of the tool source.
:type path: str
"""
type: ToolSourceType = ToolSourceType.Code
tool: Optional[str] = None
path: Optional[str] = None
@staticmethod
def deserialize(data: dict) -> "ToolSource":
"""Deserialize the tool source from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The tool source constructed from the dict.
:rtype: ~promptflow.contracts.flow.ToolSource
"""
result = ToolSource(data.get("type", ToolSourceType.Code.value))
if "tool" in data:
result.tool = data["tool"]
if "path" in data:
result.path = data["path"]
return result
@dataclass
class ActivateCondition:
"""This class represents the activate condition of a node.
:param condition: The condition of the activate condition.
:type condition: ~promptflow.contracts.flow.InputAssignment
:param condition_value: The value of the condition.
:type condition_value: Any
"""
condition: InputAssignment
condition_value: Any
@staticmethod
def deserialize(data: dict, node_name: str = None) -> "ActivateCondition":
"""Deserialize the activate condition from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The activate condition constructed from the dict.
:rtype: ~promptflow.contracts.flow.ActivateCondition
"""
node_name = node_name if node_name else ""
if "when" in data and "is" in data:
if data["when"] is None and data["is"] is None:
logger.warning(
f"The activate config for node {node_name} has empty 'when' and 'is'. "
"Please check your flow yaml to ensure it aligns with your expectations."
)
return ActivateCondition(
condition=InputAssignment.deserialize(data["when"]),
condition_value=data["is"],
)
else:
raise FlowDefinitionError(
message_format=(
"The definition of activate config for node {node_name} "
"is incorrect. Please check your flow yaml and resubmit."
),
node_name=node_name,
)
@dataclass
class Node:
"""This class represents a node in a flow.
:param name: The name of the node.
:type name: str
:param tool: The tool of the node.
:type tool: str
:param inputs: The inputs of the node.
:type inputs: Dict[str, InputAssignment]
:param comment: The comment of the node.
:type comment: str
:param api: The api of the node.
:type api: str
:param provider: The provider of the node.
:type provider: str
:param module: The module of the node.
:type module: str
:param connection: The connection of the node.
:type connection: str
:param aggregation: Whether the node is an aggregation node.
:type aggregation: bool
:param enable_cache: Whether the node enable cache.
:type enable_cache: bool
:param use_variants: Whether the node use variants.
:type use_variants: bool
:param source: The source of the node.
:type source: ~promptflow.contracts.flow.ToolSource
:param type: The tool type of the node.
:type type: ~promptflow.contracts.tool.ToolType
:param activate: The activate condition of the node.
:type activate: ~promptflow.contracts.flow.ActivateCondition
"""
name: str
tool: str
inputs: Dict[str, InputAssignment]
comment: str = ""
api: str = None
provider: str = None
module: str = None # The module of provider to import
connection: str = None
aggregation: bool = False
enable_cache: bool = False
use_variants: bool = False
source: Optional[ToolSource] = None
type: Optional[ToolType] = None
activate: Optional[ActivateCondition] = None
def serialize(self):
"""Serialize the node to a dict.
:return: The dict of the node.
:rtype: dict
"""
data = asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v})
self.inputs = self.inputs or {}
data.update({"inputs": {name: i.serialize() for name, i in self.inputs.items()}})
if self.aggregation:
data["aggregation"] = True
data["reduce"] = True # TODO: Remove this fallback.
if self.type:
data["type"] = self.type.value
return data
@staticmethod
def deserialize(data: dict) -> "Node":
"""Deserialize the node from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The node constructed from the dict.
:rtype: ~promptflow.contracts.flow.Node
"""
node = Node(
name=data.get("name"),
tool=data.get("tool"),
inputs={name: InputAssignment.deserialize(v) for name, v in (data.get("inputs") or {}).items()},
comment=data.get("comment", ""),
api=data.get("api", None),
provider=data.get("provider", None),
module=data.get("module", None),
connection=data.get("connection", None),
aggregation=data.get("aggregation", False) or data.get("reduce", False), # TODO: Remove this fallback.
enable_cache=data.get("enable_cache", False),
use_variants=data.get("use_variants", False),
)
if "source" in data:
node.source = ToolSource.deserialize(data["source"])
if "type" in data:
node.type = ToolType(data["type"])
if "activate" in data:
node.activate = ActivateCondition.deserialize(data["activate"], node.name)
return node
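# A hedged deserialization sketch; the node and tool names are illustrative.
#
#   node = Node.deserialize({
#       "name": "joke",
#       "tool": "joke_tool",
#       "type": "python",
#       "inputs": {"topic": "${flow.topic}"},
#   })
#   assert node.type == ToolType.PYTHON
#   assert node.serialize()["inputs"]["topic"] == "${flow.topic}"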
@dataclass
class FlowInputDefinition:
"""This class represents the definition of a flow input.
:param type: The type of the flow input.
:type type: ~promptflow.contracts.tool.ValueType
:param default: The default value of the flow input.
:type default: str
:param description: The description of the flow input.
:type description: str
:param enum: The enum of the flow input.
:type enum: List[str]
:param is_chat_input: Whether the flow input is a chat input.
:type is_chat_input: bool
:param is_chat_history: Whether the flow input is a chat history.
:type is_chat_history: bool
"""
type: ValueType
default: str = None
description: str = None
enum: List[str] = None
is_chat_input: bool = False
is_chat_history: bool = None
def serialize(self):
"""Serialize the flow input definition to a dict.
:return: The dict of the flow input definition.
:rtype: dict
"""
data = {}
data["type"] = self.type.value
if self.default:
data["default"] = str(self.default)
if self.description:
data["description"] = self.description
if self.enum:
data["enum"] = self.enum
if self.is_chat_input:
data["is_chat_input"] = True
if self.is_chat_history:
data["is_chat_history"] = True
return data
@staticmethod
def deserialize(data: dict) -> "FlowInputDefinition":
"""Deserialize the flow input definition from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The flow input definition constructed from the dict.
:rtype: ~promptflow.contracts.flow.FlowInputDefinition
"""
return FlowInputDefinition(
ValueType(data["type"]),
data.get("default", None),
data.get("description", ""),
data.get("enum", []),
data.get("is_chat_input", False),
data.get("is_chat_history", None),
)
@dataclass
class FlowOutputDefinition:
"""This class represents the definition of a flow output.
:param type: The type of the flow output.
:type type: ~promptflow.contracts.tool.ValueType
:param reference: The reference of the flow output.
:type reference: ~promptflow.contracts.flow.InputAssignment
:param description: The description of the flow output.
:type description: str
:param evaluation_only: Whether the flow output is for evaluation only.
:type evaluation_only: bool
:param is_chat_output: Whether the flow output is a chat output.
:type is_chat_output: bool
"""
type: ValueType
reference: InputAssignment
description: str = ""
evaluation_only: bool = False
is_chat_output: bool = False
def serialize(self):
"""Serialize the flow output definition to a dict.
:return: The dict of the flow output definition.
:rtype: dict
"""
data = {}
data["type"] = self.type.value
if self.reference:
data["reference"] = self.reference.serialize()
if self.description:
data["description"] = self.description
if self.evaluation_only:
data["evaluation_only"] = True
if self.is_chat_output:
data["is_chat_output"] = True
return data
@staticmethod
def deserialize(data: dict):
"""Deserialize the flow output definition from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The flow output definition constructed from the dict.
:rtype: ~promptflow.contracts.flow.FlowOutputDefinition
"""
return FlowOutputDefinition(
ValueType(data["type"]),
InputAssignment.deserialize(data.get("reference", "")),
data.get("description", ""),
data.get("evaluation_only", False),
data.get("is_chat_output", False),
)
@dataclass
class NodeVariant:
"""This class represents a node variant.
:param node: The node of the node variant.
:type node: ~promptflow.contracts.flow.Node
:param description: The description of the node variant.
:type description: str
"""
node: Node
description: str = ""
@staticmethod
def deserialize(data: dict) -> "NodeVariant":
"""Deserialize the node variant from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The node variant constructed from the dict.
:rtype: ~promptflow.contracts.flow.NodeVariant
"""
return NodeVariant(
Node.deserialize(data["node"]),
data.get("description", ""),
)
@dataclass
class NodeVariants:
"""This class represents the variants of a node.
:param default_variant_id: The default variant id of the node.
:type default_variant_id: str
:param variants: The variants of the node.
:type variants: Dict[str, NodeVariant]
"""
default_variant_id: str # The default variant id of the node
variants: Dict[str, NodeVariant] # The variants of the node
@staticmethod
def deserialize(data: dict) -> "NodeVariants":
"""Deserialize the node variants from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The node variants constructed from the dict.
:rtype: ~promptflow.contracts.flow.NodeVariants
"""
variants = {}
for variant_id, node in data["variants"].items():
variants[variant_id] = NodeVariant.deserialize(node)
return NodeVariants(default_variant_id=data.get("default_variant_id", ""), variants=variants)
@dataclass
class Flow:
"""This class represents a flow.
:param id: The id of the flow.
:type id: str
:param name: The name of the flow.
:type name: str
:param nodes: The nodes of the flow.
:type nodes: List[Node]
:param inputs: The inputs of the flow.
:type inputs: Dict[str, FlowInputDefinition]
:param outputs: The outputs of the flow.
:type outputs: Dict[str, FlowOutputDefinition]
:param tools: The tools of the flow.
:type tools: List[Tool]
:param node_variants: The node variants of the flow.
:type node_variants: Dict[str, NodeVariants]
:param program_language: The program language of the flow.
:type program_language: str
:param environment_variables: The default environment variables of the flow.
:type environment_variables: Dict[str, object]
"""
id: str
name: str
nodes: List[Node]
inputs: Dict[str, FlowInputDefinition]
outputs: Dict[str, FlowOutputDefinition]
tools: List[Tool]
node_variants: Dict[str, NodeVariants] = None
program_language: str = FlowLanguage.Python
environment_variables: Dict[str, object] = None
def serialize(self):
"""Serialize the flow to a dict.
:return: The dict of the flow.
:rtype: dict
"""
data = {
"id": self.id,
"name": self.name,
"nodes": [n.serialize() for n in self.nodes],
"inputs": {name: i.serialize() for name, i in self.inputs.items()},
"outputs": {name: o.serialize() for name, o in self.outputs.items()},
"tools": [serialize(t) for t in self.tools],
"language": self.program_language,
}
return data
@staticmethod
def _import_requisites(tools, nodes):
"""This function will import tools/nodes required modules to ensure type exists so flow can be executed."""
try:
# Import tool modules to ensure register_builtins & registered_connections executed
for tool in tools:
if tool.module:
try_import(tool.module, f"Import tool {tool.name!r} module {tool.module!r} failed.")
# Import node provider to ensure register_apis executed so that provider & connection exists.
for node in nodes:
if node.module:
try_import(node.module, f"Import node {node.name!r} provider module {node.module!r} failed.")
except Exception as e:
logger.warning("Failed to import modules...")
raise FailedToImportModule(
message=f"Failed to import modules with error: {str(e)}.", target=ErrorTarget.RUNTIME
) from e
@staticmethod
def deserialize(data: dict) -> "Flow":
"""Deserialize the flow from a dict.
:param data: The dict to be deserialized.
:type data: dict
:return: The flow constructed from the dict.
:rtype: ~promptflow.contracts.flow.Flow
"""
tools = [Tool.deserialize(t) for t in data.get("tools") or []]
nodes = [Node.deserialize(n) for n in data.get("nodes") or []]
Flow._import_requisites(tools, nodes)
inputs = data.get("inputs") or {}
outputs = data.get("outputs") or {}
return Flow(
# TODO: Remove this fallback.
data.get("id", data.get("name", "default_flow_id")),
data.get("name", "default_flow"),
nodes,
{name: FlowInputDefinition.deserialize(i) for name, i in inputs.items()},
{name: FlowOutputDefinition.deserialize(o) for name, o in outputs.items()},
tools=tools,
node_variants={name: NodeVariants.deserialize(v) for name, v in (data.get("node_variants") or {}).items()},
program_language=data.get(LANGUAGE_KEY, FlowLanguage.Python),
environment_variables=data.get("environment_variables") or {},
)
def _apply_default_node_variants(self: "Flow"):
self.nodes = [
self._apply_default_node_variant(node, self.node_variants) if node.use_variants else node
for node in self.nodes
]
return self
@staticmethod
def _apply_default_node_variant(node: Node, node_variants: Dict[str, NodeVariants]) -> Node:
if not node_variants:
return node
node_variant = node_variants.get(node.name)
if not node_variant:
return node
default_variant = node_variant.variants.get(node_variant.default_variant_id)
if not default_variant:
return node
default_variant.node.name = node.name
return default_variant.node
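    # Behavior sketch (a note, not from the original source): for a node with
    # use_variants=True, the dag entry acts as a placeholder; the default
    # variant's node body is substituted in while keeping the placeholder's
    # name, so existing references to the node by name still resolve.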
@classmethod
def _resolve_working_dir(cls, flow_file: Path, working_dir=None) -> Path:
working_dir = cls._parse_working_dir(flow_file, working_dir)
cls._update_working_dir(working_dir)
return working_dir
@classmethod
def _parse_working_dir(cls, flow_file: Path, working_dir=None) -> Path:
if working_dir is None:
working_dir = Path(flow_file).resolve().parent
working_dir = Path(working_dir).absolute()
return working_dir
@classmethod
def _update_working_dir(cls, working_dir: Path):
sys.path.insert(0, str(working_dir))
@classmethod
def from_yaml(cls, flow_file: Path, working_dir=None) -> "Flow":
"""Load flow from yaml file."""
working_dir = cls._parse_working_dir(flow_file, working_dir)
with open(working_dir / flow_file, "r", encoding=DEFAULT_ENCODING) as fin:
flow_dag = load_yaml(fin)
return Flow._from_dict(flow_dag=flow_dag, working_dir=working_dir)
@classmethod
def _from_dict(cls, flow_dag: dict, working_dir: Path) -> "Flow":
"""Load flow from dict."""
cls._update_working_dir(working_dir)
flow = Flow.deserialize(flow_dag)
flow._set_tool_loader(working_dir)
return flow
@classmethod
def load_env_variables(
cls, flow_file: Path, working_dir=None, environment_variables_overrides: Dict[str, str] = None
) -> Dict[str, str]:
"""
Read flow_environment_variables from flow yaml.
If environment_variables_overrides exists, override yaml level configuration.
Returns the merged environment variables dict.
"""
        if Path(flow_file).suffix.lower() not in (".yaml", ".yml"):
            # The flow_file of an eager flow is a .py file, which carries no yaml-level environment variables.
            return environment_variables_overrides or {}
working_dir = cls._parse_working_dir(flow_file, working_dir)
with open(working_dir / flow_file, "r", encoding=DEFAULT_ENCODING) as fin:
flow_dag = load_yaml(fin)
flow = Flow.deserialize(flow_dag)
return flow.get_environment_variables_with_overrides(
environment_variables_overrides=environment_variables_overrides
)
def get_environment_variables_with_overrides(
self, environment_variables_overrides: Dict[str, str] = None
) -> Dict[str, str]:
environment_variables = {
k: (json.dumps(v) if isinstance(v, (dict, list)) else str(v)) for k, v in self.environment_variables.items()
}
if environment_variables_overrides is not None:
for k, v in environment_variables_overrides.items():
environment_variables[k] = v
return environment_variables
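    # Merge semantics, as a sketch (example values assumed): given
    # environment_variables = {"A": 1, "B": {"k": "v"}} and overrides = {"A": "2"},
    # the result is {"A": "2", "B": '{"k": "v"}'} -- dict/list values are
    # JSON-encoded, other values are str()-ed, and overrides always win.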
def _set_tool_loader(self, working_dir):
package_tool_keys = [node.source.tool for node in self.nodes if node.source and node.source.tool]
from promptflow._core.tools_manager import ToolLoader
        # TODO: consider refactoring this. It raises an error if promptflow-tools
        # is not installed, even for a csharp flow.
self._tool_loader = ToolLoader(working_dir, package_tool_keys)
def _apply_node_overrides(self, node_overrides):
"""Apply node overrides to update the nodes in the flow.
Example:
node_overrides = {
"llm_node1.connection": "some_connection",
"python_node1.some_key": "some_value",
}
We will update the connection field of llm_node1 and the input value of python_node1.some_key.
"""
if not node_overrides:
return self
# We don't do detailed error handling here, since it should never fail
for key, value in node_overrides.items():
node_name, input_name = key.split(".")
node = self.get_node(node_name)
if node is None:
raise ValueError(f"Cannot find node {node_name} in flow {self.name}")
            # For an LLM node, override the connection field on the node
if node.connection and input_name == "connection":
node.connection = value
            # In other scenarios, override the input value in the node's inputs
else:
node.inputs[input_name] = InputAssignment(value=value)
return self
def has_aggregation_node(self):
"""Return whether the flow has aggregation node."""
return any(n.aggregation for n in self.nodes)
def get_node(self, node_name):
"""Return the node with the given name."""
return next((n for n in self.nodes if n.name == node_name), None)
def get_tool(self, tool_name):
"""Return the tool with the given name."""
return next((t for t in self.tools if t.name == tool_name), None)
def is_reduce_node(self, node_name):
"""Return whether the node is a reduce node."""
node = next((n for n in self.nodes if n.name == node_name), None)
return node is not None and node.aggregation
def is_normal_node(self, node_name):
"""Return whether the node is a normal node."""
node = next((n for n in self.nodes if n.name == node_name), None)
return node is not None and not node.aggregation
def is_llm_node(self, node):
"""Given a node, return whether it uses LLM tool."""
return node.type == ToolType.LLM
def is_referenced_by_flow_output(self, node):
"""Given a node, return whether it is referenced by output."""
return any(
output
for output in self.outputs.values()
if all(
(
output.reference.value_type == InputValueType.NODE_REFERENCE,
output.reference.value == node.name,
)
)
)
def is_node_referenced_by(self, node: Node, other_node: Node):
"""Given two nodes, return whether the first node is referenced by the second node."""
return other_node.inputs and any(
input
for input in other_node.inputs.values()
if input.value_type == InputValueType.NODE_REFERENCE and input.value == node.name
)
def is_referenced_by_other_node(self, node):
"""Given a node, return whether it is referenced by other node."""
return any(flow_node for flow_node in self.nodes if self.is_node_referenced_by(node, flow_node))
def is_chat_flow(self):
"""Return whether the flow is a chat flow."""
chat_input_name = self.get_chat_input_name()
return chat_input_name is not None
def get_chat_input_name(self):
"""Return the name of the chat input."""
return next((name for name, i in self.inputs.items() if i.is_chat_input), None)
def get_chat_output_name(self):
"""Return the name of the chat output."""
return next((name for name, o in self.outputs.items() if o.is_chat_output), None)
def _get_connection_name_from_tool(self, tool: Tool, node: Node):
connection_names = {}
        value_types = {v.value for v in ValueType.__members__.values()}
for k, v in tool.inputs.items():
input_type = [typ.value if isinstance(typ, Enum) else typ for typ in v.type]
if all(typ.lower() in value_types for typ in input_type):
                # All types are plain value types, so the key cannot be a connection key.
continue
input_assignment = node.inputs.get(k)
# Add literal node assignment values to results, skip node reference
if isinstance(input_assignment, InputAssignment) and input_assignment.value_type == InputValueType.LITERAL:
connection_names[k] = input_assignment.value
return connection_names
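    # Illustration (assumed shapes, not from the original source): for a tool
    # input "connection" typed ["AzureOpenAIConnection"] and a node input that
    # assigns the literal "my_conn", this returns {"connection": "my_conn"};
    # inputs whose types are all plain value types (string, int, ...) are
    # skipped, as are node-reference assignments.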
def get_connection_names(self):
"""Return connection names."""
        connection_names = set()
nodes = [
self._apply_default_node_variant(node, self.node_variants) if node.use_variants else node
for node in self.nodes
]
for node in nodes:
if node.connection:
connection_names.add(node.connection)
continue
if node.type == ToolType.PROMPT or node.type == ToolType.LLM:
continue
logger.debug(f"Try loading connection names for node {node.name}.")
tool = self.get_tool(node.tool) or self._tool_loader.load_tool_for_node(node)
if tool:
node_connection_names = list(self._get_connection_name_from_tool(tool, node).values())
else:
node_connection_names = []
if node_connection_names:
logger.debug(f"Connection names of node {node.name}: {node_connection_names}")
else:
logger.debug(f"Node {node.name} doesn't reference any connection.")
connection_names.update(node_connection_names)
        return {item for item in connection_names if item}
def get_connection_input_names_for_node(self, node_name):
"""Return connection input names."""
node = self.get_node(node_name)
if node and node.use_variants:
node = self._apply_default_node_variant(node, self.node_variants)
        # Ignore Prompt and LLM nodes, since they do not have connection inputs.
if not node or node.type == ToolType.PROMPT or node.type == ToolType.LLM:
return []
tool = self.get_tool(node.tool) or self._tool_loader.load_tool_for_node(node)
if tool:
return list(self._get_connection_name_from_tool(tool, node).keys())
return []
def _replace_with_variant(self, variant_node: Node, variant_tools: list):
for index, node in enumerate(self.nodes):
if node.name == variant_node.name:
self.nodes[index] = variant_node
break
self.tools = self.tools + variant_tools
# File: promptflow_repo/promptflow/src/promptflow/promptflow/contracts/_errors.py
from promptflow.exceptions import UserErrorException
class FailedToImportModule(UserErrorException):
pass
class FlowDefinitionError(UserErrorException):
pass
# File: promptflow_repo/promptflow/src/promptflow/promptflow/contracts/_run_management.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
from dataclasses import dataclass
from typing import Any, Dict, List, Optional
from promptflow._sdk._constants import VIS_JS_BUNDLE_FILENAME
@dataclass
class RunDetail:
flow_runs: List[dict]
node_runs: List[dict]
@dataclass
class RunMetadata:
name: str
display_name: str
create_time: str
flow_path: str
output_path: str
tags: Optional[List[Dict[str, str]]]
lineage: Optional[str]
metrics: Optional[Dict[str, Any]]
dag: Optional[str]
flow_tools_json: Optional[dict]
mode: Optional[str] = ""
@dataclass
class VisualizationConfig:
    # Use camelCase here to fit the contract required by the JS side
availableIDEList: List[str]
@dataclass
class RunVisualization:
detail: List[RunDetail]
metadata: List[RunMetadata]
config: List[VisualizationConfig]
@dataclass
class VisualizationRender:
data: dict
js_path: str = VIS_JS_BUNDLE_FILENAME
def __post_init__(self):
self.data = json.dumps(json.dumps(self.data)) # double json.dumps to match JS requirements
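    # Why the double json.dumps (a note, not from the original source): the
    # first call serializes the dict to a JSON string; the second escapes that
    # string into a JSON string literal that can be embedded verbatim in the
    # JS bundle, e.g. {"a": 1} -> '{"a": 1}' -> the string "{\"a\": 1}"
    # (quotes and escapes included).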
# File: promptflow_repo/promptflow/src/promptflow/promptflow/contracts/tool.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import logging
from dataclasses import asdict, dataclass
from enum import Enum
from typing import Any, Dict, List, Optional, Type, TypeVar
from promptflow._constants import CONNECTION_NAME_PROPERTY
from .multimedia import Image
from .types import AssistantDefinition, FilePath, PromptTemplate, Secret
logger = logging.getLogger(__name__)
T = TypeVar("T", bound="Enum")
def _deserialize_enum(cls: Type[T], val) -> T:
if not all(isinstance(i.value, str) for i in cls):
return val
typ = next((i for i in cls if val.lower() == i.value.lower()), None)
    # Keep the string value for unknown types, as they may be resolved later after some requisites are imported.
    # Type resolution is ensured in 'ensure_node_inputs_type' before execution.
return typ if typ else val
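# Quick illustration (not in the original source): matching is case-insensitive,
# and unknown values pass through unchanged so they can be resolved later:
#     _deserialize_enum(ValueType, "STRING")        # -> ValueType.STRING
#     _deserialize_enum(ValueType, "CustomThing")   # -> "CustomThing" (kept as str)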
class ValueType(str, Enum):
"""Value types."""
INT = "int"
DOUBLE = "double"
BOOL = "bool"
STRING = "string"
SECRET = "secret"
PROMPT_TEMPLATE = "prompt_template"
LIST = "list"
OBJECT = "object"
FILE_PATH = "file_path"
IMAGE = "image"
ASSISTANT_DEFINITION = "assistant_definition"
@staticmethod
def from_value(t: Any) -> "ValueType":
"""Get :class:`~promptflow.contracts.tool.ValueType` by value.
:param t: The value needs to get its :class:`~promptflow.contracts.tool.ValueType`
:type t: Any
:return: The :class:`~promptflow.contracts.tool.ValueType` of the given value
:rtype: ~promptflow.contracts.tool.ValueType
"""
if isinstance(t, Secret):
return ValueType.SECRET
if isinstance(t, PromptTemplate):
return ValueType.PROMPT_TEMPLATE
if isinstance(t, bool):
return ValueType.BOOL
if isinstance(t, int):
return ValueType.INT
if isinstance(t, float):
return ValueType.DOUBLE
# FilePath is a subclass of str, so it must be checked before str
if isinstance(t, FilePath):
return ValueType.FILE_PATH
if isinstance(t, str):
return ValueType.STRING
if isinstance(t, list):
return ValueType.LIST
if isinstance(t, AssistantDefinition):
return ValueType.ASSISTANT_DEFINITION
return ValueType.OBJECT
@staticmethod
def from_type(t: type) -> "ValueType":
"""Get :class:`~promptflow.contracts.tool.ValueType` by type.
:param t: The type needs to get its :class:`~promptflow.contracts.tool.ValueType`
:type t: type
:return: The :class:`~promptflow.contracts.tool.ValueType` of the given type
:rtype: ~promptflow.contracts.tool.ValueType
"""
if t == int:
return ValueType.INT
if t == float:
return ValueType.DOUBLE
if t == bool:
return ValueType.BOOL
if t == str:
return ValueType.STRING
if t == list:
return ValueType.LIST
if t == Secret:
return ValueType.SECRET
if t == PromptTemplate:
return ValueType.PROMPT_TEMPLATE
if t == FilePath:
return ValueType.FILE_PATH
if t == Image:
return ValueType.IMAGE
if t == AssistantDefinition:
return ValueType.ASSISTANT_DEFINITION
return ValueType.OBJECT
def parse(self, v: Any) -> Any: # noqa: C901
"""Parse value to the given :class:`~promptflow.contracts.tool.ValueType`.
:param v: The value needs to be parsed to the given :class:`~promptflow.contracts.tool.ValueType`
:type v: Any
:return: The parsed value
:rtype: Any
"""
if self == ValueType.INT:
return int(v)
if self == ValueType.DOUBLE:
return float(v)
if self == ValueType.BOOL:
if isinstance(v, bool):
return v
if isinstance(v, str) and v.lower() in {"true", "false"}:
return v.lower() == "true"
raise ValueError(f"Invalid boolean value {v!r}")
if self == ValueType.STRING:
return str(v)
if self == ValueType.LIST:
if isinstance(v, str):
v = json.loads(v)
if not isinstance(v, list):
raise ValueError(f"Invalid list value {v!r}")
return v
if self == ValueType.OBJECT:
if isinstance(v, str):
try:
return json.loads(v)
except Exception:
# Ignore the exception since it might really be a string
pass
# TODO: parse other types
return v
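    # Usage sketch (example values assumed, not from the original source):
    #     ValueType.INT.parse("3")          # -> 3
    #     ValueType.BOOL.parse("True")      # -> True (case-insensitive)
    #     ValueType.LIST.parse("[1, 2]")    # -> [1, 2] (JSON-decoded from str)
    #     ValueType.OBJECT.parse("plain")   # -> "plain" (left as-is on JSON error)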
class ConnectionType:
"""This class provides methods to interact with connection types."""
@staticmethod
def get_connection_class(type_name: str) -> Optional[type]:
"""Get connection type by type name.
:param type_name: The type name of the connection
:type type_name: str
:return: The connection type
:rtype: type
"""
# Note: This function must be called after ensure_flow_valid, as required modules may not be imported yet,
# and connections may not be registered yet.
from promptflow._core.tools_manager import connections
if not isinstance(type_name, str):
return None
return connections.get(type_name)
@staticmethod
def is_connection_class_name(type_name: str) -> bool:
"""Check if the given type name is a connection type.
:param type_name: The type name of the connection
:type type_name: str
:return: Whether the given type name is a connection type
:rtype: bool
"""
return ConnectionType.get_connection_class(type_name) is not None
@staticmethod
def is_connection_value(val: Any) -> bool:
"""Check if the given value is a connection.
:param val: The value to check
:type val: Any
:return: Whether the given value is a connection
:rtype: bool
"""
# Note: This function must be called after ensure_flow_valid, as required modules may not be imported yet,
# and connections may not be registered yet.
from promptflow._core.tools_manager import connections
val = type(val) if not isinstance(val, type) else val
return val in connections.values() or ConnectionType.is_custom_strong_type(val)
@staticmethod
def is_custom_strong_type(val: Any) -> bool:
"""Check if the given value is a custom strong type connection.
:param val: The value to check
:type val: Any
:return: Whether the given value is a custom strong type
:rtype: bool
"""
from promptflow.connections import CustomStrongTypeConnection
val = type(val) if not isinstance(val, type) else val
try:
return issubclass(val, CustomStrongTypeConnection)
except TypeError as e:
# TypeError is not expected to happen, but if it does, we will log it for debugging and return False.
# The try-except block cannot be confidently removed due to the uncertainty of TypeError that may occur.
logger.warning(f"Failed to check if {val} is a custom strong type: {e}")
return False
@staticmethod
def serialize_conn(connection: Any) -> dict:
"""Serialize the given connection.
:param connection: The connection to serialize
:type connection: Any
:return: A dictionary representation of the connection.
:rtype: dict
"""
if not ConnectionType.is_connection_value(connection):
raise ValueError(f"Invalid connection value {connection!r}")
return getattr(connection, CONNECTION_NAME_PROPERTY, type(connection).__name__)
class ToolType(str, Enum):
"""Tool types."""
LLM = "llm"
PYTHON = "python"
CSHARP = "csharp"
PROMPT = "prompt"
_ACTION = "action"
CUSTOM_LLM = "custom_llm"
@dataclass
class InputDefinition:
"""Input definition."""
type: List[ValueType]
default: str = None
description: str = None
enum: List[str] = None
# Param 'custom_type' is currently used for inputs of custom strong type connection.
# For a custom strong type connection input, the type should be 'CustomConnection',
# while the custom_type should be the custom strong type connection class name.
custom_type: List[str] = None
def serialize(self) -> dict:
"""Serialize input definition to dict.
:return: The serialized input definition
:rtype: dict
"""
data = {}
data["type"] = [t.value for t in self.type]
if len(self.type) == 1:
data["type"] = self.type[0].value
if self.default:
data["default"] = str(self.default)
if self.description:
data["description"] = self.description
if self.enum:
data["enum"] = self.enum
if self.custom_type:
data["custom_type"] = self.custom_type
return data
@staticmethod
def deserialize(data: dict) -> "InputDefinition":
"""Deserialize dict to input definition.
:param data: The dict needs to be deserialized
:type data: dict
:return: The deserialized input definition
:rtype: ~promptflow.contracts.tool.InputDefinition
"""
def _deserialize_type(v):
v = [v] if not isinstance(v, list) else v
            # Note: Connection types are kept as string values,
            # as they may be resolved later after some requisites are imported.
return [_deserialize_enum(ValueType, item) for item in v]
return InputDefinition(
_deserialize_type(data["type"]),
data.get("default", ""),
data.get("description", ""),
data.get("enum", []),
data.get("custom_type", []),
)
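    # Round-trip sketch (not in the original source): a single-element type
    # list collapses to a scalar on serialize and is re-wrapped on deserialize:
    #     d = InputDefinition([ValueType.STRING]).serialize()   # {"type": "string"}
    #     InputDefinition.deserialize(d).type                   # [ValueType.STRING]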
def to_flow_input_definition(self):
""" Used for eager flow to convert input definition to flow input definition.
"""
from .flow import FlowInputDefinition
# TODO: To align with tool resolver we respect the first type if multiple types are provided,
# still need more discussion on this. Should we raise error if multiple types are provided?
return FlowInputDefinition(
type=self.type[0], default=self.default, description=self.description, enum=self.enum
)
@dataclass
class OutputDefinition:
"""Output definition."""
type: List["ValueType"]
description: str = ""
is_property: bool = False
def serialize(self) -> dict:
"""Serialize output definition to dict.
:return: The serialized output definition
:rtype: dict
"""
data = {"type": [t.value for t in self.type], "is_property": self.is_property}
if len(data["type"]) == 1:
data["type"] = data["type"][0]
if self.description:
data["description"] = self.description
return data
@staticmethod
def deserialize(data: dict) -> "OutputDefinition":
"""Deserialize dict to output definition.
:param data: The dict needs to be deserialized
:type data: dict
:return: The deserialized output definition
:rtype: ~promptflow.contracts.tool.OutputDefinition
"""
return OutputDefinition(
[ValueType(t) for t in data["type"]] if isinstance(data["type"], list) else [ValueType(data["type"])],
data.get("description", ""),
data.get("is_property", False),
)
@dataclass
class Tool:
"""Tool definition.
:param name: The name of the tool
:type name: str
:param type: The type of the tool
:type type: ~promptflow.contracts.tool.ToolType
:param inputs: The inputs of the tool
:type inputs: Dict[str, ~promptflow.contracts.tool.InputDefinition]
:param outputs: The outputs of the tool
:type outputs: Optional[Dict[str, ~promptflow.contracts.tool.OutputDefinition]]
:param description: The description of the tool
:type description: Optional[str]
:param module: The module of the tool
:type module: Optional[str]
:param class_name: The class name of the tool
:type class_name: Optional[str]
:param source: The source of the tool
:type source: Optional[str]
:param code: The code of the tool
:type code: Optional[str]
:param function: The function of the tool
:type function: Optional[str]
:param connection_type: The connection type of the tool
:type connection_type: Optional[List[str]]
:param is_builtin: Whether the tool is a built-in tool
:type is_builtin: Optional[bool]
:param stage: The stage of the tool
:type stage: Optional[str]
:param enable_kwargs: Whether to enable kwargs, only available for customer python tool
:type enable_kwargs: Optional[bool]
:param deprecated_tools: A list of old tool IDs that are mapped to the current tool ID.
:type deprecated_tools: Optional[List[str]]
"""
name: str
type: ToolType
inputs: Dict[str, InputDefinition]
outputs: Optional[Dict[str, OutputDefinition]] = None
description: Optional[str] = None
module: Optional[str] = None
class_name: Optional[str] = None
source: Optional[str] = None
code: Optional[str] = None
function: Optional[str] = None
connection_type: Optional[List[str]] = None
is_builtin: Optional[bool] = None
stage: Optional[str] = None
enable_kwargs: Optional[bool] = False
deprecated_tools: Optional[List[str]] = None
def serialize(self) -> dict:
"""Serialize tool to dict and skip None fields.
:return: The serialized tool
:rtype: dict
"""
data = asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v is not None and k != "outputs"})
if not self.type == ToolType._ACTION:
return data
# Pop unused field for action
skipped_fields = ["type", "inputs", "outputs"]
return {k: v for k, v in data.items() if k not in skipped_fields}
@staticmethod
def deserialize(data: dict) -> "Tool":
"""Deserialize dict to tool.
:param data: The dict needs to be deserialized
:type data: dict
:return: The deserialized tool
:rtype: ~promptflow.contracts.tool.Tool
"""
return Tool(
name=data["name"],
description=data.get("description", ""),
type=_deserialize_enum(ToolType, data["type"]),
inputs={k: InputDefinition.deserialize(i) for k, i in data.get("inputs", {}).items()},
outputs={k: OutputDefinition.deserialize(o) for k, o in data.get("outputs", {}).items()},
module=data.get("module"),
class_name=data.get("class_name"),
source=data.get("source"),
code=data.get("code"),
function=data.get("function"),
connection_type=data.get("connection_type"),
is_builtin=data.get("is_builtin"),
stage=data.get("stage"),
enable_kwargs=data.get("enable_kwargs", False),
deprecated_tools=data.get("deprecated_tools"),
)
def _require_connection(self) -> bool:
        return self.type is ToolType.LLM or (isinstance(self.connection_type, list) and len(self.connection_type) > 0)
class ToolFuncCallScenario(str, Enum):
GENERATED_BY = "generated_by"
REVERSE_GENERATED_BY = "reverse_generated_by"
DYNAMIC_LIST = "dynamic_list"
# File: promptflow_repo/promptflow/src/promptflow/promptflow/contracts/run_info.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Mapping, Optional
from dateutil import parser
class Status(Enum):
"""An enumeration class for different types of run status."""
Running = "Running"
Preparing = "Preparing"
Completed = "Completed"
Failed = "Failed"
Bypassed = "Bypassed"
Canceled = "Canceled"
NotStarted = "NotStarted"
CancelRequested = "CancelRequested"
@staticmethod
def is_terminated(status):
"""Check if a given status is terminated.
:param status: The status to be checked
:type status: str or :class:`Status`
:return: True if the status is terminated, False otherwise
:rtype: bool
"""
if isinstance(status, Status):
status = status.value
return status in {s.value for s in {Status.Completed, Status.Failed, Status.Bypassed, Status.Canceled}}
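    # For example (illustrative, not from the original source):
    #     Status.is_terminated(Status.Completed)  # -> True
    #     Status.is_terminated("Running")         # -> False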
@dataclass
class RunInfo:
"""A dataclass representing the run information.
:param node: Node name
:type node: str
:param flow_run_id: The id of the flow run
:type flow_run_id: str
:param run_id: The id of the run, which equals ``flow_run_id:step_run_id``
:type run_id: str
:param status: Status of the run
:type status: ~promptflow.contracts.run_info.Status
:param inputs: List of inputs for the run
:type inputs: list
:param output: Output of the run
:type output: object
:param metrics: Metrics of the run
:type metrics: Dict[str, Any]
:param error: Errors occurred during the run
:type error: Dict[str, Any]
:param parent_run_id: Parent run id
:type parent_run_id: str
:param start_time: Start time of the run
:type start_time: datetime
:param end_time: End time of the run
:type end_time: datetime
:param index: Index of the run
:type index: Optional[int]
:param api_calls: API calls made during the run
:type api_calls: Optional[List[Dict[str, Any]]]
:param variant_id: Variant id of the run
:type variant_id: Optional[str]
:param cached_run_id: Cached run id
:type cached_run_id: Optional[str]
:param cached_flow_run_id: Cached flow run id
:type cached_flow_run_id: Optional[str]
:param logs: Logs of the run
:type logs: Optional[Dict[str, str]]
:param system_metrics: System metrics of the run
:type system_metrics: Optional[Dict[str, Any]]
:param result: Result of the run
:type result: Optional[object]
"""
node: str
flow_run_id: str
run_id: str
status: Status
inputs: Mapping[str, Any]
output: object
metrics: Dict[str, Any]
error: Dict[str, Any]
parent_run_id: str
start_time: datetime
end_time: datetime
index: Optional[int] = None
api_calls: Optional[List[Dict[str, Any]]] = None
variant_id: str = ""
cached_run_id: str = None
cached_flow_run_id: str = None
logs: Optional[Dict[str, str]] = None
system_metrics: Dict[str, Any] = None
result: object = None
@staticmethod
def deserialize(data: dict) -> "RunInfo":
"""Deserialize the RunInfo from a dict."""
run_info = RunInfo(
node=data.get("node"),
flow_run_id=data.get("flow_run_id"),
run_id=data.get("run_id"),
status=Status(data.get("status")),
inputs=data.get("inputs", None),
output=data.get("output", None),
metrics=data.get("metrics", None),
error=data.get("error", None),
parent_run_id=data.get("parent_run_id", None),
start_time=parser.parse(data.get("start_time")).replace(tzinfo=None),
end_time=parser.parse(data.get("end_time")).replace(tzinfo=None),
index=data.get("index", None),
api_calls=data.get("api_calls", None),
variant_id=data.get("variant_id", ""),
cached_run_id=data.get("cached_run_id", None),
cached_flow_run_id=data.get("cached_flow_run_id", None),
logs=data.get("logs", None),
system_metrics=data.get("system_metrics", None),
result=data.get("result", None),
)
return run_info
@dataclass
class FlowRunInfo:
"""A dataclass representing the run information.
:param run_id: The id of the run, which equals ``flow_run_id:child_flow_run_id``
:type run_id: str
:param status: Status of the flow run
:type status: ~promptflow.contracts.run_info.Status
:param error: Errors occurred during the flow run
:type error: Dict[str, Any]
:param inputs: Inputs for the flow run
:type inputs: object
:param output: Output of the flow run
:type output: object
:param metrics: Metrics of the flow run
:type metrics: Dict[str, Any]
:param request: Request made for the flow run
:type request: object
:param parent_run_id: Parent run id of the flow run
:type parent_run_id: str
:param root_run_id: Root run id of the flow run
:type root_run_id: str
:param source_run_id: The run id of the run that triggered the flow run
:type source_run_id: str
:param flow_id: Flow id of the flow run
:type flow_id: str
:param start_time: Start time of the flow run
:type start_time: datetime
:param end_time: End time of the flow run
:type end_time: datetime
:param index: Index of the flow run (used for bulk test mode)
:type index: Optional[int]
:param api_calls: API calls made during the flow run
:type api_calls: Optional[List[Dict[str, Any]]]
:param variant_id: Variant id of the flow run
:type variant_id: Optional[str]
:param name: Name of the flow run
:type name: Optional[str]
:param description: Description of the flow run
:type description: Optional[str]
:param tags: Tags of the flow run
:type tags: Optional[Dict[str, str]]
:param system_metrics: System metrics of the flow run
:type system_metrics: Optional[Dict[str, Any]]
:param result: Result of the flow run
:type result: Optional[object]
:param upload_metrics: Flag indicating whether to upload metrics for the flow run
:type upload_metrics: Optional[bool]
"""
run_id: str
status: Status
error: object
inputs: object
output: object
metrics: Dict[str, Any]
request: object
parent_run_id: str
root_run_id: str
source_run_id: str
flow_id: str
start_time: datetime
end_time: datetime
index: Optional[int] = None
api_calls: Optional[List[Dict[str, Any]]] = None
variant_id: str = ""
name: str = ""
description: str = ""
tags: Optional[Mapping[str, str]] = None
system_metrics: Dict[str, Any] = None
result: object = None
upload_metrics: bool = False # only set as true for root runs in bulk test mode and evaluation mode
@staticmethod
def deserialize(data: dict) -> "FlowRunInfo":
"""Deserialize the FlowRunInfo from a dict."""
flow_run_info = FlowRunInfo(
run_id=data.get("run_id"),
status=Status(data.get("status")),
error=data.get("error", None),
inputs=data.get("inputs", None),
output=data.get("output", None),
metrics=data.get("metrics", None),
request=data.get("request", None),
parent_run_id=data.get("parent_run_id", None),
root_run_id=data.get("root_run_id", None),
source_run_id=data.get("source_run_id", None),
flow_id=data.get("flow_id"),
start_time=parser.parse(data.get("start_time")).replace(tzinfo=None),
end_time=parser.parse(data.get("end_time")).replace(tzinfo=None),
index=data.get("index", None),
api_calls=data.get("api_calls", None),
variant_id=data.get("variant_id", ""),
name=data.get("name", ""),
description=data.get("description", ""),
tags=data.get("tags", None),
system_metrics=data.get("system_metrics", None),
result=data.get("result", None),
upload_metrics=data.get("upload_metrics", False),
)
return flow_run_info
@staticmethod
def create_with_error(start_time, inputs, index, run_id, error):
return FlowRunInfo(
run_id=run_id,
status=Status.Failed,
error=error,
inputs=inputs,
output=None,
metrics=None,
request=None,
parent_run_id=run_id,
root_run_id=run_id,
source_run_id=run_id,
flow_id="default_flow_id",
start_time=start_time,
end_time=datetime.utcnow(),
index=index,
)
# File: promptflow_repo/promptflow/src/promptflow/promptflow/executor/flow_executor.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
import copy
import functools
import inspect
import os
import uuid
from pathlib import Path
from threading import current_thread
from types import GeneratorType
from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple
from promptflow._constants import LINE_NUMBER_KEY
from promptflow._core._errors import NotSupported, UnexpectedError
from promptflow._core.cache_manager import AbstractCacheManager
from promptflow._core.flow_execution_context import FlowExecutionContext
from promptflow._core.metric_logger import add_metric_logger, remove_metric_logger
from promptflow._core.openai_injector import inject_openai_api
from promptflow._core.operation_context import OperationContext
from promptflow._core.run_tracker import RunTracker
from promptflow._core.tool import STREAMING_OPTION_PARAMETER_ATTR
from promptflow._core.tools_manager import ToolsManager
from promptflow._utils.context_utils import _change_working_dir
from promptflow._utils.execution_utils import (
apply_default_value_for_input,
collect_lines,
get_aggregation_inputs_properties,
)
from promptflow._utils.logger_utils import flow_logger, logger
from promptflow._utils.multimedia_utils import (
load_multimedia_data,
load_multimedia_data_recursively,
persist_multimedia_data,
)
from promptflow._utils.utils import get_int_env_var, transpose
from promptflow._utils.yaml_utils import load_yaml
from promptflow.contracts.flow import Flow, FlowInputDefinition, InputAssignment, InputValueType, Node
from promptflow.contracts.run_info import FlowRunInfo, Status
from promptflow.contracts.run_mode import RunMode
from promptflow.exceptions import PromptflowException
from promptflow.executor import _input_assignment_parser
from promptflow.executor._async_nodes_scheduler import AsyncNodesScheduler
from promptflow.executor._errors import (
InvalidFlowFileError,
NodeOutputNotFound,
OutputReferenceNotExist,
SingleNodeValidationError,
)
from promptflow.executor._flow_nodes_scheduler import (
DEFAULT_CONCURRENCY_BULK,
DEFAULT_CONCURRENCY_FLOW,
FlowNodesScheduler,
)
from promptflow.executor._result import AggregationResult, LineResult
from promptflow.executor._tool_resolver import ToolResolver
from promptflow.executor.flow_validator import FlowValidator
from promptflow.storage import AbstractRunStorage
from promptflow.storage._run_storage import DefaultRunStorage
class FlowExecutor:
"""This class is used to execute a single flow for different inputs.
:param flow: The flow to be executed.
:type flow: ~promptflow.contracts.flow.Flow
:param connections: The connections to be used for the flow.
:type connections: dict
:param run_tracker: The run tracker to be used for the flow.
:type run_tracker: ~promptflow._core.run_tracker.RunTracker
:param cache_manager: The cache manager to be used for the flow.
:type cache_manager: ~promptflow._core.cache_manager.AbstractCacheManager
:param loaded_tools: The loaded tools to be used for the flow.
:type loaded_tools: Mapping[str, Callable]
:param worker_count: The number of workers to be used for the flow. Default is 16.
:type worker_count: Optional[int]
:param raise_ex: Whether to raise exceptions or not. Default is False.
:type raise_ex: Optional[bool]
:param working_dir: The working directory to be used for the flow. Default is None.
:type working_dir: Optional[str]
:param line_timeout_sec: The line timeout in seconds to be used for the flow. Default is LINE_TIMEOUT_SEC.
:type line_timeout_sec: Optional[int]
:param flow_file: The flow file to be used for the flow. Default is None.
:type flow_file: Optional[Path]
"""
def __init__(
self,
flow: Flow,
connections: dict,
run_tracker: RunTracker,
cache_manager: AbstractCacheManager,
loaded_tools: Mapping[str, Callable],
*,
entry: Optional[str] = None,
raise_ex: bool = False,
working_dir=None,
line_timeout_sec=None,
flow_file=None,
):
"""Initialize a FlowExecutor object.
:param flow: The Flow object to execute.
:type flow: ~promptflow.contracts.flow.Flow
:param connections: The connections between nodes in the Flow.
:type connections: dict
:param run_tracker: The RunTracker object to track the execution of the Flow.
:type run_tracker: ~promptflow._core.run_tracker.RunTracker
:param cache_manager: The AbstractCacheManager object to manage caching of results.
:type cache_manager: ~promptflow._core.cache_manager.AbstractCacheManager
:param loaded_tools: A mapping of tool names to their corresponding functions.
:type loaded_tools: Mapping[str, Callable]
:param raise_ex: Whether to raise an exception if an error occurs during execution.
:type raise_ex: bool
:param working_dir: The working directory to use for execution.
:type working_dir: str or None
:param line_timeout_sec: The maximum time to wait for a line of output from a node.
:type line_timeout_sec: int or None
:param flow_file: The path to the file containing the Flow definition.
:type flow_file: str or None
"""
# Inject OpenAI API to make sure traces and headers injection works and
# update OpenAI API configs from environment variables.
inject_openai_api()
self._flow = flow
self._flow_id = flow.id or str(uuid.uuid4())
self._connections = connections
self._aggregation_inputs_references = get_aggregation_inputs_properties(flow)
self._aggregation_nodes = {node.name for node in self._flow.nodes if node.aggregation}
self._run_tracker = run_tracker
self._cache_manager = cache_manager
self._loaded_tools = loaded_tools
self._working_dir = working_dir
self._line_timeout_sec = line_timeout_sec or get_int_env_var("PF_LINE_TIMEOUT_SEC")
self._flow_file = flow_file
try:
self._tools_manager = ToolsManager(loaded_tools)
tool_to_meta = {tool.name: tool for tool in flow.tools}
custom_tools = {
node.name: self._tools_manager._load_custom_tool(tool_to_meta[node.tool], node.name)
for node in flow.nodes
if not self._tools_manager.loaded(node.name)
}
self._tools_manager.load_tools(custom_tools)
except PromptflowException as e:
# For PromptflowException, we don't wrap it, because need generate ErrorResponse by inner exception.
# Will try to find one common way to handle this case.
raise e
except Exception as e:
raise ValueError(f"Failed to load custom tools for flow due to exception:\n {e}.") from e
for node in flow.nodes:
self._tools_manager.assert_loaded(node.name)
self._entry = entry
self._raise_ex = raise_ex
self._log_interval = 60
self._processing_idx = None
self._completed_idx = None
# TODO: Improve the experience about configuring node concurrency.
self._node_concurrency = DEFAULT_CONCURRENCY_BULK
@classmethod
def create(
cls,
flow_file: Path,
connections: dict,
working_dir: Optional[Path] = None,
*,
entry: Optional[str] = None,
storage: Optional[AbstractRunStorage] = None,
raise_ex: bool = True,
node_override: Optional[Dict[str, Dict[str, Any]]] = None,
line_timeout_sec: Optional[int] = None,
) -> "FlowExecutor":
"""Create a new instance of FlowExecutor.
:param flow_file: The path to the flow file.
:type flow_file: Path
:param connections: The connections to be used for the flow.
:type connections: dict
:param working_dir: The working directory to be used for the flow. Default is None.
:type working_dir: Optional[str]
        :param entry: The entry function to be used for the flow if a .py file is provided. Default is None.
        :type entry: Optional[str]
:param storage: The storage to be used for the flow. Default is None.
:type storage: Optional[~promptflow.storage.AbstractRunStorage]
:param raise_ex: Whether to raise exceptions or not. Default is True.
:type raise_ex: Optional[bool]
:param node_override: The node overrides to be used for the flow. Default is None.
:type node_override: Optional[Dict[str, Dict[str, Any]]]
:param line_timeout_sec: The line timeout in seconds to be used for the flow. Default is LINE_TIMEOUT_SEC.
:type line_timeout_sec: Optional[int]
:return: A new instance of FlowExecutor.
:rtype: ~promptflow.executor.flow_executor.FlowExecutor
"""
if cls._is_eager_flow_yaml(flow_file, working_dir):
if Path(flow_file).suffix.lower() in [".yml", ".yaml"]:
entry, path = cls._parse_eager_flow_yaml(flow_file, working_dir)
flow_file = Path(path)
from ._script_executor import ScriptExecutor
return ScriptExecutor(
flow_file=flow_file,
entry=entry,
working_dir=working_dir,
storage=storage,
)
elif Path(flow_file).suffix.lower() in [".yml", ".yaml"]:
flow = Flow.from_yaml(flow_file, working_dir=working_dir)
return cls._create_from_flow(
flow_file=flow_file,
flow=flow,
connections=connections,
working_dir=working_dir,
entry=entry,
storage=storage,
raise_ex=raise_ex,
node_override=node_override,
line_timeout_sec=line_timeout_sec,
)
else:
raise InvalidFlowFileError(message_format="Unsupported flow file type: {flow_file}.", flow_file=flow_file)
@classmethod
def _create_from_flow(
cls,
flow: Flow,
connections: dict,
working_dir: Optional[Path],
*,
flow_file: Optional[Path] = None,
entry: Optional[str] = None,
storage: Optional[AbstractRunStorage] = None,
raise_ex: bool = True,
node_override: Optional[Dict[str, Dict[str, Any]]] = None,
line_timeout_sec: Optional[int] = None,
):
logger.debug("Start initializing the flow executor.")
working_dir = Flow._resolve_working_dir(flow_file, working_dir)
if node_override:
flow = flow._apply_node_overrides(node_override)
flow = flow._apply_default_node_variants()
package_tool_keys = [node.source.tool for node in flow.nodes if node.source and node.source.tool]
tool_resolver = ToolResolver(working_dir, connections, package_tool_keys)
with _change_working_dir(working_dir):
resolved_tools = [tool_resolver.resolve_tool_by_node(node) for node in flow.nodes]
flow = Flow(
flow.id, flow.name, [r.node for r in resolved_tools], inputs=flow.inputs, outputs=flow.outputs, tools=[]
)
        # ensure_flow_valid includes both validation and resolution.
        # TODO: 1) split pure validation and resolution out of the method below; 2) provide a complete validation().
flow = FlowValidator._validate_nodes_topology(flow)
flow.outputs = FlowValidator._ensure_outputs_valid(flow)
if storage is None:
storage = DefaultRunStorage()
run_tracker = RunTracker(storage)
cache_manager = AbstractCacheManager.init_from_env()
executor = FlowExecutor(
flow=flow,
connections=connections,
run_tracker=run_tracker,
cache_manager=cache_manager,
loaded_tools={r.node.name: r.callable for r in resolved_tools},
entry=entry,
raise_ex=raise_ex,
working_dir=working_dir,
line_timeout_sec=line_timeout_sec,
flow_file=flow_file,
)
logger.debug("The flow executor is initialized successfully.")
return executor
@classmethod
def _is_eager_flow_yaml(cls, flow_file: Path, working_dir: Optional[Path] = None):
if Path(flow_file).suffix.lower() == ".py":
return True
elif Path(flow_file).suffix.lower() in [".yaml", ".yml"]:
flow_file = working_dir / flow_file if working_dir else flow_file
with open(flow_file, "r", encoding="utf-8") as fin:
flow_dag = load_yaml(fin)
if "entry" in flow_dag:
return True
return False
@classmethod
def _parse_eager_flow_yaml(cls, flow_file: Path, working_dir: Optional[Path] = None):
flow_file = working_dir / flow_file if working_dir else flow_file
with open(flow_file, "r", encoding="utf-8") as fin:
flow_dag = load_yaml(fin)
return flow_dag.get("entry", ""), flow_dag.get("path", "")
@classmethod
def load_and_exec_node(
cls,
flow_file: Path,
node_name: str,
*,
storage: AbstractRunStorage = None,
output_sub_dir: Optional[str] = None,
flow_inputs: Optional[Mapping[str, Any]] = None,
dependency_nodes_outputs: Optional[Mapping[str, Any]] = None,
connections: Optional[dict] = None,
working_dir: Optional[Path] = None,
raise_ex: bool = False,
):
"""Load and execute a single node from the flow.
:param flow_file: The path to the flow file.
:type flow_file: Path
:param node_name: The name of the node to be executed.
:type node_name: str
:param storage: The storage to be used for the flow.
:type storage: Optional[~promptflow.storage.AbstractRunStorage]
        :param output_sub_dir: The directory used to persist images for the flow. Kept only for backward compatibility.
:type output_sub_dir: Optional[str]
:param flow_inputs: The inputs to be used for the flow. Default is None.
:type flow_inputs: Optional[Mapping[str, Any]]
:param dependency_nodes_outputs: The outputs of the dependency nodes. Default is None.
        :type dependency_nodes_outputs: Optional[Mapping[str, Any]]
:param connections: The connections to be used for the flow. Default is None.
:type connections: Optional[dict]
:param working_dir: The working directory to be used for the flow. Default is None.
:type working_dir: Optional[str]
:param raise_ex: Whether to raise exceptions or not. Default is False.
:type raise_ex: Optional[bool]
"""
# Inject OpenAI API to make sure traces and headers injection works and
# update OpenAI API configs from environment variables.
inject_openai_api()
OperationContext.get_instance().run_mode = RunMode.SingleNode.name
dependency_nodes_outputs = dependency_nodes_outputs or {}
# Load the node from the flow file
working_dir = Flow._resolve_working_dir(flow_file, working_dir)
        with open(working_dir / flow_file, "r", encoding="utf-8") as fin:
flow = Flow.deserialize(load_yaml(fin))
node = flow.get_node(node_name)
if node is None:
raise SingleNodeValidationError(
message_format=(
"Validation failed when attempting to execute the node. "
"Node '{node_name}' is not found in flow '{flow_file}'. "
"Please change node name or correct the flow file."
),
node_name=node_name,
flow_file=flow_file,
)
if not node.source or not node.type:
raise SingleNodeValidationError(
message_format=(
"Validation failed when attempting to execute the node. "
"Properties 'source' or 'type' are not specified for Node '{node_name}' in flow '{flow_file}'. "
"Please make sure these properties are in place and try again."
),
node_name=node_name,
flow_file=flow_file,
)
# Only load the node's referenced flow inputs
node_referenced_flow_inputs = FlowExecutor._get_node_referenced_flow_inputs(node, flow.inputs)
inputs_with_default_value = apply_default_value_for_input(node_referenced_flow_inputs, flow_inputs)
converted_flow_inputs_for_node = FlowValidator.convert_flow_inputs_for_node(
flow, node, inputs_with_default_value
)
inputs = load_multimedia_data(node_referenced_flow_inputs, converted_flow_inputs_for_node)
dependency_nodes_outputs = load_multimedia_data_recursively(dependency_nodes_outputs)
package_tool_keys = [node.source.tool] if node.source and node.source.tool else []
tool_resolver = ToolResolver(working_dir, connections, package_tool_keys)
resolved_node = tool_resolver.resolve_tool_by_node(node)
# Prepare callable and real inputs here
resolved_inputs = {}
for k, v in resolved_node.node.inputs.items():
value = _input_assignment_parser.parse_value(v, dependency_nodes_outputs, inputs)
resolved_inputs[k] = value
if resolved_node.node.aggregation:
                # For an aggregation node, convert the value to a list.
                if v.value_type == InputValueType.FLOW_INPUT or (
                    v.value_type == InputValueType.NODE_REFERENCE and flow.is_normal_node(v.value)
                ):
resolved_inputs[k] = [value]
# Note that the init args are only used when resolving the tool,
# so we need to remove them from the inputs before invoking.
resolved_inputs = {k: v for k, v in resolved_inputs.items() if k not in resolved_node.init_args}
if storage is None:
sub_dir = "." if output_sub_dir is None else output_sub_dir
storage = DefaultRunStorage(base_dir=working_dir, sub_dir=Path(sub_dir))
run_tracker = RunTracker(storage)
with run_tracker.node_log_manager:
# Will generate node run in context
context = FlowExecutionContext(
name=flow.name,
run_tracker=run_tracker,
cache_manager=AbstractCacheManager.init_from_env(),
)
try:
if inspect.iscoroutinefunction(resolved_node.callable):
asyncio.run(
context.invoke_tool_async(resolved_node.node, resolved_node.callable, kwargs=resolved_inputs),
)
else:
context.invoke_tool(resolved_node.node, resolved_node.callable, kwargs=resolved_inputs)
except Exception:
if raise_ex: # Only raise exception when raise_ex is True
raise
node_runs = run_tracker.collect_node_runs()
if len(node_runs) != 1:
            # Should not happen unless there is a bug in run_tracker or thread control.
raise UnexpectedError(
message_format=(
"Single node execution failed. Expected one node result, "
"but received {node_result_num}. Please contact support for further assistance."
),
node_result_num=len(node_runs),
)
return node_runs[0]
@staticmethod
def update_environment_variables_with_connections(connections: dict):
"""Update environment variables with connections.
:param connections: A dictionary containing connection information.
:type connections: dict
:return: A dictionary containing updated environment variables.
:rtype: dict
"""
from promptflow._sdk._utils import update_environment_variables_with_connections
return update_environment_variables_with_connections(connections)
def convert_flow_input_types(self, inputs: dict) -> Mapping[str, Any]:
"""Convert the input types of the given inputs dictionary to match the expected types of the flow.
:param inputs: A dictionary containing the inputs to the flow.
:type inputs: dict
:return: A dictionary containing the converted inputs.
:rtype: Mapping[str, Any]
"""
return FlowValidator.resolve_flow_inputs_type(self._flow, inputs)
@property
def _default_inputs_mapping(self):
return {key: f"${{data.{key}}}" for key in self._flow.inputs}
@property
def has_aggregation_node(self) -> bool:
"""Check if the flow executor has any aggregation nodes.
:return: True if the flow executor has at least one aggregation node, False otherwise.
:rtype: bool
"""
return len(self._aggregation_nodes) > 0
@property
def aggregation_nodes(self):
"""Get the aggregation nodes of the flow executor.
:return: A list of aggregation nodes.
:rtype: list
"""
return self._aggregation_nodes
def _fill_lines(self, indexes, values, nlines):
"""Fill the values into the result list according to the indexes."""
result = [None] * nlines
for idx, value in zip(indexes, values):
result[idx] = value
return result
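    # e.g. (illustration only): _fill_lines([0, 2], ["a", "b"], 4) -> ["a", None, "b", None]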
def _exec_aggregation_with_bulk_results(
self,
batch_inputs: List[dict],
results: List[LineResult],
run_id=None,
) -> AggregationResult:
if not self.aggregation_nodes:
return AggregationResult({}, {}, {})
logger.info("Executing aggregation nodes...")
run_infos = [r.run_info for r in results]
succeeded = [i for i, r in enumerate(run_infos) if r.status == Status.Completed]
succeeded_batch_inputs = [batch_inputs[i] for i in succeeded]
resolved_succeeded_batch_inputs = [
FlowValidator.ensure_flow_inputs_type(flow=self._flow, inputs=input) for input in succeeded_batch_inputs
]
succeeded_inputs = transpose(resolved_succeeded_batch_inputs, keys=list(self._flow.inputs.keys()))
aggregation_inputs = transpose(
[result.aggregation_inputs for result in results],
keys=self._aggregation_inputs_references,
)
succeeded_aggregation_inputs = collect_lines(succeeded, aggregation_inputs)
try:
aggr_results = self._exec_aggregation(succeeded_inputs, succeeded_aggregation_inputs, run_id)
logger.info("Finish executing aggregation nodes.")
return aggr_results
except PromptflowException as e:
# For PromptflowException, we already do classification, so throw directly.
raise e
except Exception as e:
error_type_and_message = f"({e.__class__.__name__}) {e}"
raise UnexpectedError(
message_format=(
"Unexpected error occurred while executing the aggregated nodes. "
"Please fix or contact support for assistance. The error details: {error_type_and_message}."
),
error_type_and_message=error_type_and_message,
) from e
@staticmethod
def _try_get_aggregation_input(val: InputAssignment, aggregation_inputs: dict):
if val.value_type != InputValueType.NODE_REFERENCE:
return val
serialized_val = val.serialize()
if serialized_val not in aggregation_inputs:
return val
return InputAssignment(value=aggregation_inputs[serialized_val])
def get_status_summary(self, run_id: str):
"""Get a summary of the status of a given run.
:param run_id: The ID of the run to get the status summary for.
:type run_id: str
:return: A summary of the status of the given run.
:rtype: str
"""
return self._run_tracker.get_status_summary(run_id)
def exec_aggregation(
self,
inputs: Mapping[str, Any],
aggregation_inputs: Mapping[str, Any],
run_id=None,
node_concurrency=DEFAULT_CONCURRENCY_FLOW,
) -> AggregationResult:
"""Execute the aggregation node of the flow.
:param inputs: A mapping of input names to their values.
:type inputs: Mapping[str, Any]
:param aggregation_inputs: A mapping of aggregation input names to their values.
:type aggregation_inputs: Mapping[str, Any]
:param run_id: The ID of the current run, if any.
:type run_id: Optional[str]
:param node_concurrency: The maximum number of nodes that can be executed concurrently.
:type node_concurrency: int
:return: The result of the aggregation node.
:rtype: ~promptflow.executor._result.AggregationResult
:raises: FlowError if the inputs or aggregation_inputs are invalid.
"""
self._node_concurrency = node_concurrency
aggregated_flow_inputs = dict(inputs or {})
aggregation_inputs = dict(aggregation_inputs or {})
FlowValidator._validate_aggregation_inputs(aggregated_flow_inputs, aggregation_inputs)
aggregated_flow_inputs = self._apply_default_value_for_aggregation_input(
self._flow.inputs, aggregated_flow_inputs, aggregation_inputs
)
        # Resolve aggregated_flow_inputs from lists of strings to lists of objects, whose types are specified in the yaml file.
        # TODO: For now, we resolve the types of a batch run's aggregation inputs in _exec_aggregation_with_bulk_results.
        # If we decide to merge the resolve logic into one place, remember to take care of the index for batch runs.
resolved_aggregated_flow_inputs = FlowValidator.resolve_aggregated_flow_inputs_type(
self._flow, aggregated_flow_inputs
)
with self._run_tracker.node_log_manager:
return self._exec_aggregation(resolved_aggregated_flow_inputs, aggregation_inputs, run_id)
@staticmethod
def _apply_default_value_for_aggregation_input(
inputs: Dict[str, FlowInputDefinition],
aggregated_flow_inputs: Mapping[str, Any],
aggregation_inputs: Mapping[str, Any],
):
aggregation_lines = 1
if aggregated_flow_inputs.values():
one_input_value = list(aggregated_flow_inputs.values())[0]
aggregation_lines = len(one_input_value)
# If aggregated_flow_inputs is empty, we should use aggregation_inputs to get the length.
elif aggregation_inputs.values():
one_input_value = list(aggregation_inputs.values())[0]
aggregation_lines = len(one_input_value)
for key, value in inputs.items():
if key not in aggregated_flow_inputs and (value and value.default is not None):
aggregated_flow_inputs[key] = [value.default] * aggregation_lines
return aggregated_flow_inputs
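    # Behavior sketch (example values assumed, not from the original source):
    # with two succeeded lines and a flow input "threshold" (default 0.5) that
    # is absent from aggregated_flow_inputs, this adds {"threshold": [0.5, 0.5]}
    # so the default is broadcast once per aggregated line.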
def _exec_aggregation(
self,
inputs: Mapping[str, Any],
aggregation_inputs: Mapping[str, Any],
run_id=None,
) -> AggregationResult:
if not self._flow.has_aggregation_node:
return AggregationResult({}, {}, {})
run_id = run_id or str(uuid.uuid4())
nodes = [copy.deepcopy(node) for node in self._flow.nodes if node.aggregation]
# Update the inputs of the aggregation nodes with the aggregation inputs.
for node in nodes:
node.inputs = {
k: FlowExecutor._try_get_aggregation_input(v, aggregation_inputs) for k, v in node.inputs.items()
}
# Load multimedia data for the flow inputs of aggregation nodes.
inputs = load_multimedia_data(self._flow.inputs, inputs)
# TODO: Use a new run tracker to avoid memory increase infinitely.
run_tracker = self._run_tracker
context = FlowExecutionContext(
name=self._flow.name,
run_tracker=run_tracker,
cache_manager=self._cache_manager,
run_id=run_id,
flow_id=self._flow_id,
)
metrics = {}
def _log_metric(key, value):
metrics[key] = value
add_metric_logger(_log_metric)
try:
self._submit_to_scheduler(context, inputs, nodes)
node_run_infos = run_tracker.collect_child_node_runs(run_id)
# Output is set as an empty dict, because the aggregation outputs story is not finalized.
return AggregationResult({}, metrics, {run.node: run for run in node_run_infos})
except Exception:
if self._raise_ex:
raise
node_run_infos = run_tracker.collect_child_node_runs(run_id)
return AggregationResult({}, metrics, {run.node: run for run in node_run_infos})
finally:
remove_metric_logger(_log_metric)
def exec(self, inputs: dict, node_concurrency=DEFAULT_CONCURRENCY_FLOW) -> dict:
"""Executes the flow with the given inputs and returns the output.
:param inputs: A dictionary containing the input values for the flow.
:type inputs: dict
:param node_concurrency: The maximum number of nodes that can be executed concurrently.
:type node_concurrency: int
:return: A dictionary containing the output values of the flow.
:rtype: dict
"""
self._node_concurrency = node_concurrency
inputs = apply_default_value_for_input(self._flow.inputs, inputs)
result = self._exec(inputs)
# TODO: remove this line once serving directly calls self.exec_line
self._add_line_results([result])
return result.output or {}
def _exec_in_thread(self, args) -> LineResult:
inputs, run_id, line_number, variant_id, validate_inputs = args
thread_name = current_thread().name
self._processing_idx[line_number] = thread_name
self._run_tracker._activate_in_context()
results = self._exec(
inputs, run_id=run_id, line_number=line_number, variant_id=variant_id, validate_inputs=validate_inputs
)
self._run_tracker._deactivate_in_context()
self._processing_idx.pop(line_number)
self._completed_idx[line_number] = thread_name
return results
def _extract_aggregation_inputs(self, nodes_outputs: dict):
return {
prop: self._extract_aggregation_input(nodes_outputs, prop) for prop in self._aggregation_inputs_references
}
def _extract_aggregation_input(self, nodes_outputs: dict, aggregation_input_property: str):
assign = InputAssignment.deserialize(aggregation_input_property)
return _input_assignment_parser.parse_value(assign, nodes_outputs, {})
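# Example (illustrative): for a serialized reference such as "${node1.output}",
# InputAssignment.deserialize yields a NODE_REFERENCE assignment with value
# "node1", so the extracted aggregation input is nodes_outputs["node1"].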
def exec_line(
self,
inputs: Mapping[str, Any],
index: Optional[int] = None,
run_id: Optional[str] = None,
variant_id: str = "",
validate_inputs: bool = True,
node_concurrency=DEFAULT_CONCURRENCY_FLOW,
allow_generator_output: bool = False,
) -> LineResult:
"""Execute a single line of the flow.
:param inputs: The input values for the line.
:type inputs: Mapping[str, Any]
:param index: The index of the line to execute.
:type index: Optional[int]
:param run_id: The ID of the flow run.
:type run_id: Optional[str]
:param variant_id: The ID of the variant to execute.
:type variant_id: str
:param validate_inputs: Whether to validate the input values.
:type validate_inputs: bool
:param node_concurrency: The maximum number of nodes that can be executed concurrently.
:type node_concurrency: int
:param allow_generator_output: Whether to allow generator output.
:type allow_generator_output: bool
:return: The result of executing the line.
:rtype: ~promptflow.executor._result.LineResult
"""
self._node_concurrency = node_concurrency
inputs = apply_default_value_for_input(self._flow.inputs, inputs)
# For a flow run, validate inputs by default
with self._run_tracker.node_log_manager:
# exec_line interface may be called when executing a batch run, so we only set run_mode as flow run when
# it is not set.
operation_context = OperationContext.get_instance()
operation_context.run_mode = operation_context.get("run_mode", None) or RunMode.Test.name
line_result = self._exec(
inputs,
run_id=run_id,
line_number=index,
variant_id=variant_id,
validate_inputs=validate_inputs,
allow_generator_output=allow_generator_output,
)
# Return line result with index
if index is not None and isinstance(line_result.output, dict):
line_result.output[LINE_NUMBER_KEY] = index
return line_result
def _add_line_results(self, line_results: List[LineResult], run_tracker: Optional[RunTracker] = None):
run_tracker = run_tracker or self._run_tracker
run_tracker._flow_runs.update({result.run_info.run_id: result.run_info for result in line_results})
run_tracker._node_runs.update(
{
node_run_info.run_id: node_run_info
for result in line_results
for node_run_info in result.node_run_infos.values()
}
)
@staticmethod
def _get_node_referenced_flow_inputs(
node, flow_inputs: Dict[str, FlowInputDefinition]
) -> Dict[str, FlowInputDefinition]:
node_referenced_flow_inputs = {}
for _, value in node.inputs.items():
# Only add the flow input to node_referenced_flow_inputs when it exists and is referenced by the node.
# If the flow input does not exist, an exception is raised in FlowValidator.convert_flow_inputs_for_node.
if value.value_type == InputValueType.FLOW_INPUT and value.value in flow_inputs:
node_referenced_flow_inputs[value.value] = flow_inputs[value.value]
return node_referenced_flow_inputs
def _exec(
self,
inputs: Mapping[str, Any],
run_id: Optional[str] = None,
line_number: Optional[int] = None,
variant_id: str = "",
validate_inputs: bool = False,
allow_generator_output: bool = False,
) -> LineResult:
"""execute line run
Args:
inputs (Mapping): flow inputs
run_id: the id to identify the flow run
line_number: line number for batch inputs
validate_inputs:
Flag to indicate whether input validation is needed. It is used along with "_raise_ex" to
determine whether an exception shall be raised if input validation (type check, etc.) fails.
The flag defaults to True for a flow run and False for a bulk run.
allow_generator_output:
Flag to indicate if generator output is allowed.
Returns:
LineResult: Line run result
"""
run_id = run_id or str(uuid.uuid4())
line_run_id = run_id if line_number is None else f"{run_id}_{line_number}"
run_tracker = RunTracker(
self._run_tracker._storage, self._run_tracker._run_mode, self._run_tracker.node_log_manager
)
# We need to copy the allow_generator_types from the original run_tracker.
run_tracker.allow_generator_types = self._run_tracker.allow_generator_types
run_info: FlowRunInfo = run_tracker.start_flow_run(
flow_id=self._flow_id,
root_run_id=run_id,
run_id=line_run_id,
parent_run_id=run_id,
inputs={k: inputs[k] for k in self._flow.inputs if k in inputs},
index=line_number,
variant_id=variant_id,
)
context = FlowExecutionContext(
name=self._flow.name,
run_tracker=run_tracker,
cache_manager=self._cache_manager,
run_id=run_id,
flow_id=self._flow_id,
line_number=line_number,
variant_id=variant_id,
)
output = {}
aggregation_inputs = {}
try:
if validate_inputs:
inputs = FlowValidator.ensure_flow_inputs_type(flow=self._flow, inputs=inputs, idx=line_number)
inputs = load_multimedia_data(self._flow.inputs, inputs)
# Make sure the run_info records the converted inputs rather than the original inputs
run_info.inputs = inputs
output, nodes_outputs = self._traverse_nodes(inputs, context)
output = self._stringify_generator_output(output) if not allow_generator_output else output
# Persist the node runs for the nodes that have a generator output
generator_output_nodes = [
nodename for nodename, output in nodes_outputs.items() if isinstance(output, GeneratorType)
]
run_tracker.persist_selected_node_runs(run_info, generator_output_nodes)
run_tracker.allow_generator_types = allow_generator_output
run_tracker.end_run(line_run_id, result=output)
aggregation_inputs = self._extract_aggregation_inputs(nodes_outputs)
except KeyboardInterrupt as ex:
# Run will be cancelled when the process receives a SIGINT signal.
# KeyboardInterrupt will be raised after asyncio finishes its signal handling
# End run with the KeyboardInterrupt exception, so that its status will be Canceled
flow_logger.info("Received KeyboardInterrupt, cancel the run.")
run_tracker.end_run(line_run_id, ex=ex)
raise
except Exception as e:
run_tracker.end_run(line_run_id, ex=e)
if self._raise_ex:
raise
finally:
run_tracker._update_flow_run_info_with_node_runs(run_info)
run_tracker.persist_flow_run(run_info)
node_run_infos = run_tracker.collect_child_node_runs(line_run_id)
node_runs = {node_run.node: node_run for node_run in node_run_infos}
return LineResult(output, aggregation_inputs, run_info, node_runs)
def _extract_outputs(self, nodes_outputs, bypassed_nodes, flow_inputs):
outputs = {}
for name, output in self._flow.outputs.items():
if output.reference.value_type == InputValueType.LITERAL:
outputs[name] = output.reference.value
continue
if output.reference.value_type == InputValueType.FLOW_INPUT:
outputs[name] = flow_inputs[output.reference.value]
continue
if output.reference.value_type != InputValueType.NODE_REFERENCE:
raise NotSupported(
message_format=(
"The output type '{output_type}' is currently unsupported. "
"Please choose from available types: '{supported_output_type}' and try again."
),
output_type=output.reference.value_type.value
if hasattr(output.reference.value_type, "value")
else output.reference.value_type,
supported_output_type=[output_type.value for output_type in InputValueType],
)
node = next((n for n in self._flow.nodes if n.name == output.reference.value), None)
if not node:
raise OutputReferenceNotExist(
message_format=(
"The output '{output_name}' for flow is incorrect. The node '{node_name}' "
"referenced by the output '{output_name}' can not found in flow. "
"Please rectify the error in your flow and try again."
),
node_name=output.reference.value,
output_name=name,
)
if node.aggregation:
# Note that the reduce node referenced in the output is not supported.
continue
if node.name not in nodes_outputs:
raise NodeOutputNotFound(
message_format=(
"The output '{output_name}' for flow is incorrect. "
"No outputs found for node '{node_name}'. Please review the problematic "
"output and rectify the error."
),
output_name=name,
node_name=node.name,
)
if output.reference.value in bypassed_nodes:
flow_logger.warning(
f"The node referenced by output:'{output.reference.value}' is bypassed, which is not recommended."
)
node_result = nodes_outputs[output.reference.value]
outputs[name] = _input_assignment_parser.parse_node_property(
output.reference.value, node_result, output.reference.property
)
return outputs
def _should_use_async(self):
return (
all(inspect.iscoroutinefunction(f) for f in self._tools_manager._tools.values())
or os.environ.get("PF_USE_ASYNC", "false").lower() == "true"
)
def _traverse_nodes(self, inputs, context: FlowExecutionContext) -> Tuple[dict, dict]:
batch_nodes = [node for node in self._flow.nodes if not node.aggregation]
outputs = {}
# TODO: Use a mixed scheduler to support both async and thread pool mode.
if self._should_use_async():
flow_logger.info("Start executing nodes in async mode.")
scheduler = AsyncNodesScheduler(self._tools_manager, self._node_concurrency)
nodes_outputs, bypassed_nodes = asyncio.run(scheduler.execute(batch_nodes, inputs, context))
else:
flow_logger.info("Start executing nodes in thread pool mode.")
nodes_outputs, bypassed_nodes = self._submit_to_scheduler(context, inputs, batch_nodes)
outputs = self._extract_outputs(nodes_outputs, bypassed_nodes, inputs)
return outputs, nodes_outputs
def _stringify_generator_output(self, outputs: dict):
for k, v in outputs.items():
if isinstance(v, GeneratorType):
outputs[k] = "".join(str(chuck) for chuck in v)
return outputs
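# Example (illustrative): {"answer": (c for c in "hi")} becomes {"answer": "hi"};
# non-generator values are left untouched.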
def _submit_to_scheduler(self, context: FlowExecutionContext, inputs, nodes: List[Node]) -> Tuple[dict, dict]:
if not isinstance(self._node_concurrency, int):
raise UnexpectedError(
message_format=(
"Flow execution failed. To proceed, ensure that a valid node concurrency value is set. "
"The current value is {current_value}. Please contact support for further assistance."
),
current_value=self._node_concurrency,
)
return FlowNodesScheduler(
self._tools_manager,
inputs,
nodes,
self._node_concurrency,
context,
).execute(self._line_timeout_sec)
@staticmethod
def apply_inputs_mapping(
inputs: Mapping[str, Mapping[str, Any]],
inputs_mapping: Mapping[str, str],
) -> Dict[str, Any]:
# TODO: This function will be removed after the batch engine refactoring is completed.
from promptflow.batch._batch_inputs_processor import apply_inputs_mapping
return apply_inputs_mapping(inputs, inputs_mapping)
def enable_streaming_for_llm_flow(self, stream_required: Callable[[], bool]):
"""Enable the LLM node that is connected to output to return streaming results controlled by `stream_required`.
If the stream_required callback returns True, the LLM node will return a generator of strings.
Otherwise, the LLM node will return a string.
:param stream_required: A callback that takes no arguments and returns a boolean value indicating whether \
streaming results should be enabled for the LLM node.
:type stream_required: Callable[[], bool]
:return: None
"""
for node in self._flow.nodes:
streaming_option_parameter = self._parse_streaming_option_parameter(node)
if (
streaming_option_parameter is not None
and self._flow.is_referenced_by_flow_output(node)
and not self._flow.is_referenced_by_other_node(node)
):
wrapper = _inject_stream_options(stream_required, streaming_option_parameter)
self._tools_manager.wrap_tool(node.name, wrapper=wrapper)
def _parse_streaming_option_parameter(self, node: Node) -> Optional[str]:
if self._flow.is_llm_node(node):
return "stream"
tool_function = self._tools_manager.get_tool(node.name)
return getattr(tool_function, STREAMING_OPTION_PARAMETER_ATTR, None)
def ensure_flow_is_serializable(self):
"""Ensure that the flow is serializable.
Some of the nodes may return a generator of strings to create streaming outputs.
This is useful when the flow is deployed as a web service.
However, in the interactive mode, the executor assumes that the node result is JSON serializable.
This method adds a wrapper to each node in the flow
to consume the streaming outputs and merge them into a string for executor usage.
:return: None
"""
for node in self._flow.nodes:
self._tools_manager.wrap_tool(node.name, wrapper=_ensure_node_result_is_serializable)
def _inject_stream_options(should_stream: Callable[[], bool], streaming_option_parameter="stream"):
"""Inject the stream options to the decorated function.
AzureOpenAI.completion and AzureOpenAI.chat tools support both stream and non-stream mode.
The stream mode is controlled by the "stream" parameter.
"""
def stream_option_decorator(f):
# We only wrap the function if it has a "stream" parameter
signature = inspect.signature(f)
if streaming_option_parameter not in signature.parameters:
return f
@functools.wraps(f)
def wrapper(*args, **kwargs):
kwargs = kwargs or {}
kwargs.update({streaming_option_parameter: should_stream()})
return f(*args, **kwargs)
return wrapper
return stream_option_decorator
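# Example (illustrative sketch; `fake_completion` is a hypothetical tool, not part
# of this module): only functions exposing the streaming parameter are wrapped,
# and the wrapper overrides that parameter with the callback's current result.
#     def fake_completion(prompt: str, stream: bool = False):
#         return (c for c in prompt) if stream else prompt
#     wrapped = _inject_stream_options(lambda: True)(fake_completion)
#     wrapped("hi")  # returns a generator because should_stream() returned True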
def enable_streaming_for_llm_tool(f):
"""Enable the stream mode for LLM tools that support it.
:param f: The function to wrap.
:type f: function
:return: The wrapped function.
:rtype: function
AzureOpenAI.completion and AzureOpenAI.chat tools support both stream and non-stream mode.
The stream mode is turned off by default. Use this wrapper to turn it on.
"""
# We only wrap the function if it has a "stream" parameter
signature = inspect.signature(f)
if "stream" not in signature.parameters:
return f
@functools.wraps(f)
def wrapper(*args, **kwargs):
kwargs = kwargs or {}
kwargs.update(stream=True)
return f(*args, **kwargs)
return wrapper
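# Example (illustrative; `chat` is a hypothetical tool with a "stream" parameter):
#     def chat(prompt: str, stream: bool = False):
#         return (c for c in prompt) if stream else prompt
#     streaming_chat = enable_streaming_for_llm_tool(chat)
#     streaming_chat("hi")  # generator output, since stream is forced to True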
def _ensure_node_result_is_serializable(f):
"""Ensure the node result is serializable.
Some of the nodes may return a generator of strings to create streaming outputs.
This is useful when the flow is deployed as a web service.
However, in the interactive mode, the executor assumes that the node result is JSON serializable.
This wrapper ensures the node result is serializable
by consuming the data from the generator and merging them into a string.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
result = f(*args, **kwargs)
if isinstance(result, GeneratorType):
result = "".join(str(trunk) for trunk in result)
return result
return wrapper
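# Example (illustrative): a generator-returning node result is consumed into a string.
#     @_ensure_node_result_is_serializable
#     def stream_words():
#         yield from ("a", "b")
#     stream_words()  # returns "ab" instead of a generator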
def execute_flow(
flow_file: Path,
working_dir: Path,
output_dir: Path,
connections: dict,
inputs: Mapping[str, Any],
*,
run_aggregation: bool = True,
enable_stream_output: bool = False,
allow_generator_output: bool = False, # TODO: remove this
**kwargs,
) -> LineResult:
"""Execute the flow, including aggregation nodes.
:param flow_file: The path to the flow file.
:type flow_file: Path
:param working_dir: The working directory of the flow.
:type working_dir: Path
:param output_dir: The output directory, specified as a path relative to working_dir.
:type output_dir: Path
:param connections: A dictionary containing connection information.
:type connections: dict
:param inputs: A dictionary containing the input values for the flow.
:type inputs: Mapping[str, Any]
:param run_aggregation: Whether to run the aggregation nodes after the line run. Default is True.
:type run_aggregation: Optional[bool]
:param enable_stream_output: Whether to allow stream (generator) output for flow output. Default is False.
:type enable_stream_output: Optional[bool]
:param kwargs: Other keyword arguments to create flow executor.
:type kwargs: Any
:return: The line result of executing the flow.
:rtype: ~promptflow.executor._result.LineResult
"""
flow_executor = FlowExecutor.create(flow_file, connections, working_dir, raise_ex=False, **kwargs)
flow_executor.enable_streaming_for_llm_flow(lambda: enable_stream_output)
with _change_working_dir(working_dir):
# execute nodes in the flow except the aggregation nodes
# TODO: remove index=0 after UX no longer requires a run id similar to batch runs
# (run_id_index, e.g. xxx_0) for displaying the interface
line_result = flow_executor.exec_line(inputs, index=0, allow_generator_output=allow_generator_output)
# persist the output to the output directory
line_result.output = persist_multimedia_data(line_result.output, base_dir=working_dir, sub_dir=output_dir)
if run_aggregation and line_result.aggregation_inputs:
# convert inputs of aggregation to list type
flow_inputs = {k: [v] for k, v in inputs.items()}
aggregation_inputs = {k: [v] for k, v in line_result.aggregation_inputs.items()}
aggregation_results = flow_executor.exec_aggregation(flow_inputs, aggregation_inputs=aggregation_inputs)
line_result.node_run_infos = {**line_result.node_run_infos, **aggregation_results.node_run_infos}
line_result.run_info.metrics = aggregation_results.metrics
if isinstance(line_result.output, dict):
# remove line_number from output
line_result.output.pop(LINE_NUMBER_KEY, None)
return line_result
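# Example usage (a minimal sketch; the flow directory, file and input names below
# are hypothetical):
#     from pathlib import Path
#     line_result = execute_flow(
#         flow_file=Path("./my_flow/flow.dag.yaml"),
#         working_dir=Path("./my_flow"),
#         output_dir=Path(".output"),
#         connections={},
#         inputs={"question": "What is promptflow?"},
#     )
#     print(line_result.output)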
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_input_assignment_parser.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import re
from promptflow._core._errors import NotSupported
from promptflow.contracts.flow import InputAssignment, InputValueType
from promptflow.executor._errors import (
InputNotFound,
InputNotFoundFromAncestorNodeOutput,
InvalidReferenceProperty,
UnsupportedReference,
)
def parse_value(i: InputAssignment, nodes_outputs: dict, flow_inputs: dict):
if i.value_type == InputValueType.LITERAL:
return i.value
if i.value_type == InputValueType.FLOW_INPUT:
if i.value not in flow_inputs:
flow_input_keys = ", ".join(flow_inputs.keys()) if flow_inputs is not None else None
raise InputNotFound(
message_format=(
"Flow execution failed. "
"The input '{input_name}' is not found from flow inputs '{flow_input_keys}'. "
"Please check the input name and try again."
),
input_name=i.value,
flow_input_keys=flow_input_keys,
)
return flow_inputs[i.value]
if i.value_type == InputValueType.NODE_REFERENCE:
if i.section != "output":
raise UnsupportedReference(
message_format=(
"Flow execution failed. "
"The section '{reference_section}' of reference is currently unsupported. "
"Please specify the output part of the node '{reference_node_name}'."
),
reference_section=i.section,
reference_node_name=i.value,
)
if i.value not in nodes_outputs:
node_output_keys = list(nodes_outputs.keys())
raise InputNotFoundFromAncestorNodeOutput(
message_format=(
"Flow execution failed. "
"The input '{input_name}' is not found from ancestor node outputs {node_output_keys}. "
"Please check the node name and try again."
),
input_name=i.value,
node_output_keys=node_output_keys,
)
return parse_node_property(i.value, nodes_outputs[i.value], i.property)
raise NotSupported(
message_format=(
"Flow execution failed. "
"The type '{input_type}' is currently unsupported. "
"Please choose from available types: {supported_output_type} and try again."
),
input_type=i.value_type.value if hasattr(i.value_type, "value") else i.value_type,
supported_output_type=[value_type.value for value_type in InputValueType],
)
property_pattern = r"(\w+)|(\['.*?'\])|(\[\d+\])"
def parse_node_property(node_name, node_val, property=""):
val = node_val
property_parts = re.findall(property_pattern, property)
try:
for part in property_parts:
part = [p for p in part if p][0]
if part.startswith("[") and part.endswith("]"):
index = part[1:-1]
if index.startswith("'") and index.endswith("'") or index.startswith('"') and index.endswith('"'):
index = index[1:-1]
elif index.isdigit():
index = int(index)
else:
raise InvalidReferenceProperty(
message_format=(
"Flow execution failed. "
"Invalid index '{index}' when accessing property '{property}' of the node '{node_name}'. "
"Please check the index and try again."
),
index=index,
property=property,
node_name=node_name,
)
val = val[index]
else:
if isinstance(val, dict):
val = val[part]
else:
val = getattr(val, part)
except (KeyError, IndexError, AttributeError) as e:
message_format = (
"Flow execution failed. "
"Invalid property '{property}' when accessing the node '{node_name}'. "
"Please check the property and try again."
)
raise InvalidReferenceProperty(message_format=message_format, property=property, node_name=node_name) from e
return val
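# Example (illustrative): accessing a nested property of a node output.
#     parse_node_property("node1", {"output": {"text": ["a", "b"]}}, "output['text'][0]")
#     # -> "a": "output" is a dict key, "['text']" a quoted index, "[0]" a list index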
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_result.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from dataclasses import dataclass
from typing import Any, Dict, Mapping
from promptflow.contracts.run_info import FlowRunInfo, RunInfo
@dataclass
class LineResult:
"""The result of a line process."""
output: Mapping[str, Any] # The output of the line.
# The node output values to be used as aggregation inputs, if no aggregation node, it will be empty.
aggregation_inputs: Mapping[str, Any]
run_info: FlowRunInfo # The run info of the line.
node_run_infos: Mapping[str, RunInfo] # The run info of the nodes in the line.
@staticmethod
def deserialize(data: dict) -> "LineResult":
"""Deserialize the LineResult from a dict."""
return LineResult(
output=data.get("output"),
aggregation_inputs=data.get("aggregation_inputs", {}),
run_info=FlowRunInfo.deserialize(data.get("run_info")),
node_run_infos={k: RunInfo.deserialize(v) for k, v in data.get("node_run_infos", {}).items()},
)
@dataclass
class AggregationResult:
"""The result when running aggregation nodes in the flow."""
output: Mapping[str, Any] # The output of the aggregation nodes in the flow.
metrics: Dict[str, Any] # The metrics generated by the aggregation.
node_run_infos: Mapping[str, RunInfo] # The run info of the aggregation nodes.
@staticmethod
def deserialize(data: dict) -> "AggregationResult":
"""Deserialize the AggregationResult from a dict."""
return AggregationResult(
output=data.get("output", None),
metrics=data.get("metrics", None),
node_run_infos={k: RunInfo.deserialize(v) for k, v in data.get("node_run_infos", {}).items()},
)
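# Example (illustrative): deserializing a minimal aggregation result dict.
#     AggregationResult.deserialize({"metrics": {"accuracy": 0.9}})
#     # -> AggregationResult(output=None, metrics={"accuracy": 0.9}, node_run_infos={})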
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/flow_validator.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import copy
from json import JSONDecodeError
from typing import Any, List, Mapping, Optional
from promptflow._utils.logger_utils import logger
from promptflow.contracts.flow import Flow, InputValueType, Node
from promptflow.contracts.tool import ValueType
from promptflow.executor._errors import (
DuplicateNodeName,
EmptyOutputReference,
InputNotFound,
InputParseError,
InputReferenceNotFound,
InputTypeError,
InvalidAggregationInput,
InvalidNodeReference,
NodeCircularDependency,
NodeReferenceNotFound,
OutputReferenceNotFound,
)
class FlowValidator:
"""This is a validation class designed to verify the integrity and validity of flow definitions and input data."""
@staticmethod
def _ensure_nodes_order(flow: Flow):
dependencies = {n.name: set() for n in flow.nodes}
aggregation_nodes = set(node.name for node in flow.nodes if node.aggregation)
for n in flow.nodes:
inputs_list = [i for i in n.inputs.values()]
if n.activate:
if (
n.aggregation
and n.activate.condition.value_type == InputValueType.NODE_REFERENCE
and n.activate.condition.value not in aggregation_nodes
):
msg_format = (
"Invalid node definitions found in the flow graph. Non-aggregation node '{invalid_reference}' "
"cannot be referenced in the activate config of the aggregation node '{node_name}'. Please "
"review and rectify the node reference."
)
raise InvalidNodeReference(
message_format=msg_format, invalid_reference=n.activate.condition.value, node_name=n.name
)
inputs_list.extend([n.activate.condition])
for i in inputs_list:
if i.value_type != InputValueType.NODE_REFERENCE:
continue
if i.value not in dependencies:
msg_format = (
"Invalid node definitions found in the flow graph. Node '{node_name}' references "
"a non-existent node '{reference_node_name}' in your flow. Please review your flow to "
"ensure that the node name is accurately specified."
)
raise NodeReferenceNotFound(
message_format=msg_format, node_name=n.name, reference_node_name=i.value
)
dependencies[n.name].add(i.value)
if not n.aggregation:
invalid_reference = dependencies[n.name].intersection(aggregation_nodes)
if invalid_reference:
msg_format = (
"Invalid node definitions found in the flow graph. Non-aggregate node '{node_name}' "
"cannot reference aggregate nodes {invalid_reference}. Please review and rectify "
"the node reference."
)
raise InvalidNodeReference(
message_format=msg_format, node_name=n.name, invalid_reference=invalid_reference
)
sorted_nodes = []
picked = set()
for _ in range(len(flow.nodes)):
available_nodes_iterator = (
n for n in flow.nodes if n.name not in picked and all(d in picked for d in dependencies[n.name])
)
node_to_pick = next(available_nodes_iterator, None)
if not node_to_pick:
# Figure out the node names with the circular dependency problem, sorted alphabetically
remaining_nodes = sorted(list(set(dependencies.keys()) - picked))
raise NodeCircularDependency(
message_format=(
"Invalid node definitions found in the flow graph. Node circular dependency has been detected "
"among the nodes in your flow. Kindly review the reference relationships for the nodes "
"{remaining_nodes} and resolve the circular reference issue in the flow."
),
remaining_nodes=remaining_nodes,
)
sorted_nodes.append(node_to_pick)
picked.add(node_to_pick.name)
if any(n1.name != n2.name for n1, n2 in zip(flow.nodes, sorted_nodes)):
return Flow(
id=flow.id,
name=flow.name,
nodes=sorted_nodes,
inputs=flow.inputs,
outputs=flow.outputs,
tools=flow.tools,
)
return copy.copy(flow)
@staticmethod
def _validate_nodes_topology(flow: Flow) -> Flow:
node_names = set()
for node in flow.nodes:
if node.name in node_names:
raise DuplicateNodeName(
message_format=(
"Invalid node definitions found in the flow graph. Node with name '{node_name}' appears "
"more than once in the node definitions in your flow, which is not allowed. To address "
"this issue, please review your flow and either rename or remove nodes with identical names."
),
node_name=node.name,
)
node_names.add(node.name)
for node in flow.nodes:
for v in node.inputs.values():
if v.value_type != InputValueType.FLOW_INPUT:
continue
if v.value not in flow.inputs:
msg_format = (
"Invalid node definitions found in the flow graph. Node '{node_name}' references flow input "
"'{flow_input_name}' which is not defined in your flow. To resolve this issue, "
"please review your flow, ensuring that you either add the missing flow inputs "
"or adjust node reference to the correct flow input."
)
raise InputReferenceNotFound(
message_format=msg_format, node_name=node.name, flow_input_name=v.value
)
return FlowValidator._ensure_nodes_order(flow)
@staticmethod
def _parse_input_value(input_key: str, input_value: Any, expected_type: ValueType, idx=None):
try:
return expected_type.parse(input_value)
except JSONDecodeError as e:
line_info = "" if idx is None else f" in line {idx} of input data"
flow_input_info = f"'{input_key}'{line_info}"
error_type_and_message = f"({e.__class__.__name__}) {e}"
msg_format = (
"Failed to parse the flow input. The value for flow input {flow_input_info} "
"was interpreted as JSON string since its type is '{value_type}'. However, the value "
"'{input_value}' is invalid for JSON parsing. Error details: {error_type_and_message}. "
"Please make sure your inputs are properly formatted."
)
raise InputParseError(
message_format=msg_format,
flow_input_info=flow_input_info,
input_value=input_value,
value_type=expected_type.value if hasattr(expected_type, "value") else expected_type,
error_type_and_message=error_type_and_message,
) from e
except Exception as e:
line_info = "" if idx is None else f" in line {idx} of input data"
flow_input_info = f"'{input_key}'{line_info}"
msg_format = (
"The input for flow is incorrect. The value for flow input {flow_input_info} "
"does not match the expected type '{expected_type}'. Please change flow input type "
"or adjust the input value in your input data."
)
expected_type_value = expected_type.value if hasattr(expected_type, "value") else expected_type
raise InputTypeError(
message_format=msg_format, flow_input_info=flow_input_info, expected_type=expected_type_value
) from e
@staticmethod
def resolve_aggregated_flow_inputs_type(flow: Flow, inputs: Mapping[str, List[Any]]) -> Mapping[str, Any]:
updated_inputs = {}
for input_key, input_def in flow.inputs.items():
if input_key in inputs:
input_value_list = inputs[input_key]
updated_inputs[input_key] = [
FlowValidator._parse_input_value(input_key, each_line_item, input_def.type, idx)
for idx, each_line_item in enumerate(input_value_list)
]
return updated_inputs
@staticmethod
def resolve_flow_inputs_type(flow: Flow, inputs: Mapping[str, Any], idx: Optional[int] = None) -> Mapping[str, Any]:
"""Resolve inputs by type if existing. Ignore missing inputs.
:param flow: The `flow` parameter is of type `Flow` and represents a flow object
:type flow: ~promptflow.contracts.flow.Flow
:param inputs: A dictionary containing the input values for the flow. The keys are the names of the
flow inputs, and the values are the corresponding input values
:type inputs: Mapping[str, Any]
:param idx: The `idx` parameter is an optional integer that represents the line index of the input
data. It is used to provide additional information in case there is an error with the input data
:type idx: Optional[int]
:return: The updated inputs with values are type-converted based on the expected type specified
in the `flow` object.
:rtype: Mapping[str, Any]
"""
updated_inputs = {k: v for k, v in inputs.items()}
for k, v in flow.inputs.items():
if k in inputs:
updated_inputs[k] = FlowValidator._parse_input_value(k, inputs[k], v.type, idx)
return updated_inputs
@staticmethod
def ensure_flow_inputs_type(flow: Flow, inputs: Mapping[str, Any], idx: Optional[int] = None) -> Mapping[str, Any]:
"""Make sure the inputs are completed and in the correct type. Raise Exception if not valid.
:param flow: The `flow` parameter is of type `Flow` and represents a flow object
:type flow: ~promptflow.contracts.flow.Flow
:param inputs: A dictionary containing the input values for the flow. The keys are the names of the
flow inputs, and the values are the corresponding input values
:type inputs: Mapping[str, Any]
:param idx: The `idx` parameter is an optional integer that represents the line index of the input
data. It is used to provide additional information in case there is an error with the input data
:type idx: Optional[int]
:return: The updated inputs, where the values are type-converted based on the expected
type specified in the `flow` object.
:rtype: Mapping[str, Any]
"""
for k, v in flow.inputs.items():
if k not in inputs:
line_info = "in input data" if idx is None else f"in line {idx} of input data"
msg_format = (
"The input for flow is incorrect. The value for flow input '{input_name}' is not "
"provided {line_info}. Please review your input data or remove this input in your flow "
"if it's no longer needed."
)
raise InputNotFound(message_format=msg_format, input_name=k, line_info=line_info)
return FlowValidator.resolve_flow_inputs_type(flow, inputs, idx)
@staticmethod
def convert_flow_inputs_for_node(flow: Flow, node: Node, inputs: Mapping[str, Any]) -> Mapping[str, Any]:
"""Filter the flow inputs for node and resolve the value by type.
:param flow: The `flow` parameter is an instance of the `Flow` class. It represents the flow or
workflow that contains the node and inputs
:type flow: ~promptflow.contracts.flow.Flow
:param node: The `node` parameter is an instance of the `Node` class
:type node: ~promptflow.contracts.flow.Node
:param inputs: A dictionary containing the input values for the node. The keys are the names of the
input variables, and the values are the corresponding input values
:type inputs: Mapping[str, Any]
:return: The resolved flow inputs which are needed by the node only.
:rtype: Mapping[str, Any]
"""
updated_inputs = {}
inputs = inputs or {}
for k, v in node.inputs.items():
if v.value_type == InputValueType.FLOW_INPUT:
if v.value not in flow.inputs:
raise InputNotFound(
message_format=(
"The input for node is incorrect. Node input '{node_input_name}' is not found "
"from flow inputs of node '{node_name}'. Please review the node definition in your flow."
),
node_input_name=v.value,
node_name=node.name,
)
if v.value not in inputs:
raise InputNotFound(
message_format=(
"The input for node is incorrect. Node input '{node_input_name}' is not found "
"in input data for node '{node_name}'. Please verify the inputs data for the node."
),
node_input_name=v.value,
node_name=node.name,
)
try:
updated_inputs[v.value] = flow.inputs[v.value].type.parse(inputs[v.value])
except Exception as e:
msg_format = (
"The input for node is incorrect. Value for input '{input_name}' of node '{node_name}' "
"is not type '{expected_type}'. Please review and rectify the input data."
)
raise InputTypeError(
message_format=msg_format,
input_name=k,
node_name=node.name,
expected_type=flow.inputs[v.value].type.value,
) from e
return updated_inputs
@staticmethod
def _validate_aggregation_inputs(aggregated_flow_inputs: Mapping[str, Any], aggregation_inputs: Mapping[str, Any]):
"""Validate the aggregation inputs according to the flow inputs."""
for key, value in aggregated_flow_inputs.items():
if key in aggregation_inputs:
raise InvalidAggregationInput(
message_format=(
"The input for aggregation is incorrect. The input '{input_key}' appears in both "
"aggregated flow input and aggregated reference input. "
"Please remove one of them and try the operation again."
),
input_key=key,
)
if not isinstance(value, list):
raise InvalidAggregationInput(
message_format=(
"The input for aggregation is incorrect. "
"The value for aggregated flow input '{input_key}' should be a list, "
"but received {value_type}. Please adjust the input value to match the expected format."
),
input_key=key,
value_type=type(value).__name__,
)
for key, value in aggregation_inputs.items():
if not isinstance(value, list):
raise InvalidAggregationInput(
message_format=(
"The input for aggregation is incorrect. "
"The value for aggregated reference input '{input_key}' should be a list, "
"but received {value_type}. Please adjust the input value to match the expected format."
),
input_key=key,
value_type=type(value).__name__,
)
inputs_len = {key: len(value) for key, value in aggregated_flow_inputs.items()}
inputs_len.update({key: len(value) for key, value in aggregation_inputs.items()})
if len(set(inputs_len.values())) > 1:
raise InvalidAggregationInput(
message_format=(
"The input for aggregation is incorrect. "
"The length of all aggregated inputs should be the same. Current input lengths are: "
"{key_len}. Please adjust the input value in your input data."
),
key_len=inputs_len,
)
@staticmethod
def _ensure_outputs_valid(flow: Flow):
updated_outputs = {}
for k, v in flow.outputs.items():
if v.reference.value_type == InputValueType.LITERAL and v.reference.value == "":
msg_format = (
"The output '{output_name}' for flow is incorrect. The reference is not specified for "
"the output '{output_name}' in the flow. To rectify this, "
"ensure that you accurately specify the reference in the flow."
)
raise EmptyOutputReference(message_format=msg_format, output_name=k)
if v.reference.value_type == InputValueType.FLOW_INPUT and v.reference.value not in flow.inputs:
msg_format = (
"The output '{output_name}' for flow is incorrect. The output '{output_name}' references "
"non-existent flow input '{flow_input_name}' in your flow. Please carefully review your flow and "
"correct the reference definition for the output in question."
)
raise OutputReferenceNotFound(
message_format=msg_format, output_name=k, flow_input_name=v.reference.value
)
if v.reference.value_type == InputValueType.NODE_REFERENCE:
node = flow.get_node(v.reference.value)
if node is None:
msg_format = (
"The output '{output_name}' for flow is incorrect. The output '{output_name}' references "
"non-existent node '{node_name}' in your flow. To resolve this issue, please carefully review "
"your flow and correct the reference definition for the output in question."
)
raise OutputReferenceNotFound(message_format=msg_format, output_name=k, node_name=v.reference.value)
if node.aggregation:
msg = f"Output '{k}' references a reduce node '{v.reference.value}', will not take effect."
logger.warning(msg)
# We will not add this output to the flow outputs, so we simply ignore it here
continue
updated_outputs[k] = v
return updated_outputs
@staticmethod
def ensure_flow_valid_in_batch_mode(flow: Flow):
if not flow.inputs:
message = (
"The input for flow cannot be empty in batch mode. Please review your flow and provide valid inputs."
)
raise InputNotFound(message=message)
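# Example (illustrative): aggregation inputs must not overlap and all value lists
# must have the same length.
#     FlowValidator._validate_aggregation_inputs(
#         {"question": ["q1", "q2"]}, {"answer": ["a1", "a2"]}
#     )  # passes
#     FlowValidator._validate_aggregation_inputs(
#         {"question": ["q1"]}, {"answer": ["a1", "a2"]}
#     )  # raises InvalidAggregationInput (lengths 1 vs 2)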
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_async_nodes_scheduler.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
import contextvars
import inspect
import os
import signal
import threading
import time
import traceback
from asyncio import Task
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Dict, List, Tuple
from promptflow._core.flow_execution_context import FlowExecutionContext
from promptflow._core.tools_manager import ToolsManager
from promptflow._utils.logger_utils import flow_logger
from promptflow._utils.utils import extract_user_frame_summaries, set_context
from promptflow.contracts.flow import Node
from promptflow.executor._dag_manager import DAGManager
from promptflow.executor._errors import NoNodeExecutedError
PF_ASYNC_NODE_SCHEDULER_EXECUTE_TASK_NAME = "_pf_async_nodes_scheduler.execute"
DEFAULT_TASK_LOGGING_INTERVAL = 60
ASYNC_DAG_MANAGER_COMPLETED = False
class AsyncNodesScheduler:
def __init__(
self,
tools_manager: ToolsManager,
node_concurrency: int,
) -> None:
self._tools_manager = tools_manager
self._node_concurrency = node_concurrency
self._task_start_time = {}
self._task_last_log_time = {}
self._dag_manager_completed_event = threading.Event()
async def execute(
self,
nodes: List[Node],
inputs: Dict[str, Any],
context: FlowExecutionContext,
) -> Tuple[dict, dict]:
# TODO: Provide cancel API
if threading.current_thread() is threading.main_thread():
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
else:
flow_logger.info(
"Current thread is not main thread, skip signal handler registration in AsyncNodesScheduler."
)
# Semaphore should be created in the loop, otherwise it will not work.
loop = asyncio.get_running_loop()
self._semaphore = asyncio.Semaphore(self._node_concurrency)
monitor = threading.Thread(
target=monitor_long_running_coroutine,
args=(loop, self._task_start_time, self._task_last_log_time, self._dag_manager_completed_event),
daemon=True,
)
monitor.start()
# Set the name of scheduler tasks to avoid monitoring its duration
task = asyncio.current_task()
task.set_name(PF_ASYNC_NODE_SCHEDULER_EXECUTE_TASK_NAME)
parent_context = contextvars.copy_context()
executor = ThreadPoolExecutor(
max_workers=self._node_concurrency, initializer=set_context, initargs=(parent_context,)
)
# Note that we must not use `with` statement to manage the executor.
# This is because it will always call `executor.shutdown()` when exiting the `with` block.
# Then the event loop will wait for all tasks to be completed before raising the cancellation error.
# See reference: https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.Executor
outputs = await self._execute_with_thread_pool(executor, nodes, inputs, context)
executor.shutdown()
return outputs
async def _execute_with_thread_pool(
self,
executor: ThreadPoolExecutor,
nodes: List[Node],
inputs: Dict[str, Any],
context: FlowExecutionContext,
) -> Tuple[dict, dict]:
flow_logger.info(f"Start to run {len(nodes)} nodes with the current event loop.")
dag_manager = DAGManager(nodes, inputs)
task2nodes = self._execute_nodes(dag_manager, context, executor)
while not dag_manager.completed():
task2nodes = await self._wait_and_complete_nodes(task2nodes, dag_manager)
submitted_tasks2nodes = self._execute_nodes(dag_manager, context, executor)
task2nodes.update(submitted_tasks2nodes)
# Set the event to notify the monitor thread to exit
# Ref: https://docs.python.org/3/library/threading.html#event-objects
self._dag_manager_completed_event.set()
for node in dag_manager.bypassed_nodes:
dag_manager.completed_nodes_outputs[node] = None
return dag_manager.completed_nodes_outputs, dag_manager.bypassed_nodes
async def _wait_and_complete_nodes(self, task2nodes: Dict[Task, Node], dag_manager: DAGManager) -> Dict[Task, Node]:
if not task2nodes:
raise NoNodeExecutedError("No nodes are ready for execution, but the flow is not completed.")
tasks = list(task2nodes)
for task in tasks:
self._task_start_time[task] = time.time()
done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
dag_manager.complete_nodes({task2nodes[task].name: task.result() for task in done})
for task in done:
del task2nodes[task]
return task2nodes
def _execute_nodes(
self,
dag_manager: DAGManager,
context: FlowExecutionContext,
executor: ThreadPoolExecutor,
) -> Dict[Task, Node]:
# Bypass nodes and update node run info until there are no nodes to bypass
nodes_to_bypass = dag_manager.pop_bypassable_nodes()
while nodes_to_bypass:
for node in nodes_to_bypass:
context.bypass_node(node)
nodes_to_bypass = dag_manager.pop_bypassable_nodes()
# Create tasks for ready nodes
return {
self._create_node_task(node, dag_manager, context, executor): node for node in dag_manager.pop_ready_nodes()
}
async def run_task_with_semaphore(self, coroutine):
async with self._semaphore:
return await coroutine
def _create_node_task(
self,
node: Node,
dag_manager: DAGManager,
context: FlowExecutionContext,
executor: ThreadPoolExecutor,
) -> Task:
f = self._tools_manager.get_tool(node.name)
kwargs = dag_manager.get_node_valid_inputs(node, f)
if inspect.iscoroutinefunction(f):
# For async task, it will not be executed before calling create_task.
task = context.invoke_tool_async(node, f, kwargs)
else:
# For sync task, convert it to async task and run it in executor thread.
# Even though the task is put to the thread pool, thread.start will only be triggered after create_task.
task = self._sync_function_to_async_task(executor, context, node, f, kwargs)
# Set the name of the task to the node name for debugging purpose
# It does not need to be unique by design.
# Wrap the coroutine in a task with asyncio.create_task to schedule it for event loop execution
# The task is created and added to the event loop, but the exact execution depends on loop's scheduling
return asyncio.create_task(self.run_task_with_semaphore(task), name=node.name)
@staticmethod
async def _sync_function_to_async_task(
executor: ThreadPoolExecutor,
context: FlowExecutionContext,
node,
f,
kwargs,
):
# The task will not be executed before calling create_task.
return await asyncio.get_running_loop().run_in_executor(executor, context.invoke_tool, node, f, kwargs)
def signal_handler(sig, frame):
"""
Start a thread to monitor coroutines after receiving signal.
"""
flow_logger.info(f"Received signal {sig}({signal.Signals(sig).name}), start coroutine monitor thread.")
loop = asyncio.get_running_loop()
monitor = threading.Thread(target=monitor_coroutine_after_cancellation, args=(loop,))
monitor.start()
raise KeyboardInterrupt
def log_stack_recursively(task: asyncio.Task, elapse_time: float):
"""Recursively log the frame of a task or coroutine.
A traditional stacktrace would stop at the first awaited call nested inside the coroutine.
:param task: Task to log
:type task: asyncio.Task
:param elapse_time: Seconds elapsed since the task started
:type elapse_time: float
"""
# We cannot use task.get_stack() to get the stack, because only one stack frame is
# returned for a suspended coroutine because of the implementation of CPython
# Ref: https://github.com/python/cpython/blob/main/Lib/asyncio/tasks.py
# "only one stack frame is returned for a suspended coroutine."
task_or_coroutine = task
frame_summaries = []
# Collect frame_summaries along async call chain
while True:
if isinstance(task_or_coroutine, asyncio.Task):
# For a task, get the coroutine it's running
coroutine: asyncio.coroutine = task_or_coroutine.get_coro()
elif asyncio.iscoroutine(task_or_coroutine):
coroutine = task_or_coroutine
else:
break
frame = coroutine.cr_frame
stack_summary: traceback.StackSummary = traceback.extract_stack(frame)
frame_summaries.extend(stack_summary)
task_or_coroutine = coroutine.cr_await
# Format the frame summaries to warning message
if frame_summaries:
user_frame_summaries = extract_user_frame_summaries(frame_summaries)
stack_messages = traceback.format_list(user_frame_summaries)
all_stack_message = "".join(stack_messages)
task_msg = (
f"Task {task.get_name()} has been running for {elapse_time:.0f} seconds,"
f" stacktrace:\n{all_stack_message}"
)
flow_logger.warning(task_msg)
def monitor_long_running_coroutine(
loop: asyncio.AbstractEventLoop,
task_start_time: dict,
task_last_log_time: dict,
dag_manager_completed_event: threading.Event,
):
flow_logger.info("monitor_long_running_coroutine started")
logging_interval = DEFAULT_TASK_LOGGING_INTERVAL
logging_interval_in_env = os.environ.get("PF_TASK_PEEKING_INTERVAL")
if logging_interval_in_env:
try:
value = int(logging_interval_in_env)
if value <= 0:
raise ValueError
logging_interval = value
flow_logger.info(
f"Using value of PF_TASK_PEEKING_INTERVAL in environment variable as "
f"logging interval: {logging_interval_in_env}"
)
except ValueError:
flow_logger.warning(
f"Value of PF_TASK_PEEKING_INTERVAL in environment variable ('{logging_interval_in_env}') "
f"is invalid, use default value {DEFAULT_TASK_LOGGING_INTERVAL}"
)
while not dag_manager_completed_event.is_set():
running_tasks = [task for task in asyncio.all_tasks(loop) if not task.done()]
# get duration of running tasks
for task in running_tasks:
# Do not monitor the scheduler task
if task.get_name() == PF_ASYNC_NODE_SCHEDULER_EXECUTE_TASK_NAME:
continue
# Do not monitor sync tools, since they will run in executor thread and will
# be monitored by RepeatLogTimer.
task_stacks = task.get_stack()
if (
task_stacks
and task_stacks[-1].f_code
and task_stacks[-1].f_code.co_name == AsyncNodesScheduler._sync_function_to_async_task.__name__
):
continue
if task_start_time.get(task) is None:
flow_logger.warning(f"task {task.get_name()} has no start time, which should not happen")
else:
duration = time.time() - task_start_time[task]
if duration > logging_interval:
if (
task_last_log_time.get(task) is None
or time.time() - task_last_log_time[task] > logging_interval
):
log_stack_recursively(task, duration)
task_last_log_time[task] = time.time()
time.sleep(1)
def monitor_coroutine_after_cancellation(loop: asyncio.AbstractEventLoop):
"""Exit the process when all coroutines are done.
We add this function because if a sync tool is running in async mode,
the task will be cancelled after receiving SIGINT,
but the thread will not be terminated and blocks the program from exiting.
:param loop: event loop of main thread
:type loop: asyncio.AbstractEventLoop
"""
# TODO: Use environment variable to ensure it is flow test scenario to avoid unexpected exit.
# E.g. Customer is integrating Promptflow in their own code, and they want to handle SIGINT by themselves.
# The environment variable value is a string; convert it to int so the comparison below works.
max_wait_seconds = int(os.environ.get("PF_WAIT_SECONDS_AFTER_CANCELLATION", 30))
all_tasks_are_done = False
exceeded_wait_seconds = False
thread_start_time = time.time()
flow_logger.info(f"Start to monitor coroutines after cancellation, max wait seconds: {max_wait_seconds}s")
while not all_tasks_are_done and not exceeded_wait_seconds:
# For sync tool running in async mode, the task will be cancelled,
# but the thread will not be terminated; we exit the program regardless.
# TODO: Detect whether there is any sync tool running in async mode,
# if there is none, avoid sys.exit and let the program exit gracefully.
all_tasks_are_done = all(task.done() for task in asyncio.all_tasks(loop))
if all_tasks_are_done:
flow_logger.info("All coroutines are done. Exiting.")
# We cannot ensure persist_flow_run is called before the process exits when there are
# non-daemon threads running, so sleep for 3 seconds as a best effort.
# If the caller wants to ensure flow status is cancelled in storage, it should check the flow status
# after timeout and set the flow status to Cancelled.
time.sleep(3)
# Use os._exit instead of sys.exit, so that the process can stop without
# waiting for the thread created by run_in_executor to finish.
# sys.exit: https://docs.python.org/3/library/sys.html#sys.exit
# Raise a SystemExit exception, signaling an intention to exit the interpreter.
# Specifically, it does not exit non-daemon thread
# os._exit https://docs.python.org/3/library/os.html#os._exit
# Exit the process with status n, without calling cleanup handlers, flushing stdio buffers, etc.
# Specifically, it stops process without waiting for non-daemon thread.
os._exit(0)
exceeded_wait_seconds = time.time() - thread_start_time > max_wait_seconds
time.sleep(1)
if exceeded_wait_seconds:
if not all_tasks_are_done:
flow_logger.info(
f"Not all coroutines are done within {max_wait_seconds}s"
" after cancellation. Exiting the process despite of them."
" Please config the environment variable"
" PF_WAIT_SECONDS_AFTER_CANCELLATION if your tool needs"
" more time to clean up after cancellation."
)
remaining_tasks = [task for task in asyncio.all_tasks(loop) if not task.done()]
flow_logger.info(f"Remaining tasks: {[task.get_name() for task in remaining_tasks]}")
time.sleep(3)
os._exit(0)
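# A minimal, self-contained sketch of the sync-to-async pattern used by
# AsyncNodesScheduler._sync_function_to_async_task (the names below are
# illustrative only):
#     import asyncio
#     from concurrent.futures import ThreadPoolExecutor
#     def blocking_tool(x):
#         return x * 2
#     async def main():
#         loop = asyncio.get_running_loop()
#         with ThreadPoolExecutor(max_workers=2) as pool:
#             result = await loop.run_in_executor(pool, blocking_tool, 21)
#         assert result == 42
#     asyncio.run(main())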
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_dag_manager.py | import inspect
from typing import Any, Callable, Dict, List, Mapping
from promptflow._utils.logger_utils import flow_logger
from promptflow.contracts.flow import InputAssignment, InputValueType, Node
from promptflow.executor import _input_assignment_parser
class DAGManager:
def __init__(self, nodes: List[Node], flow_inputs: dict):
self._nodes = nodes
self._flow_inputs = flow_inputs
self._pending_nodes = {node.name: node for node in nodes}
self._completed_nodes_outputs = {} # node name -> output
self._bypassed_nodes = {} # node name -> node
# TODO: Validate the DAG to avoid circular dependencies
@property
def completed_nodes_outputs(self) -> Dict[str, Any]:
return self._completed_nodes_outputs
@property
def bypassed_nodes(self) -> Dict[str, Node]:
return self._bypassed_nodes
def pop_ready_nodes(self) -> List[Node]:
"""Returns a list of node names that are ready, and removes them from the list of nodes to be processed."""
ready_nodes: List[Node] = []
for node in self._pending_nodes.values():
if self._is_node_ready(node):
ready_nodes.append(node)
for node in ready_nodes:
del self._pending_nodes[node.name]
return ready_nodes
def pop_bypassable_nodes(self) -> List[Node]:
"""Returns a list of nodes that are bypassed, and removes them from the list of nodes to be processed."""
# Confirm node should be bypassed
bypassed_nodes: List[Node] = []
for node in self._pending_nodes.values():
if self._is_node_ready(node) and self._is_node_bypassable(node):
self._bypassed_nodes[node.name] = node
bypassed_nodes.append(node)
for node in bypassed_nodes:
del self._pending_nodes[node.name]
return bypassed_nodes
def get_node_valid_inputs(self, node: Node, f: Callable) -> Mapping[str, Any]:
"""Returns the valid inputs for the node, including the flow inputs, literal values and
the outputs of completed nodes. The valid inputs are determined by the function of the node.
:param node: The node for which to determine the valid inputs.
:type node: Node
:param f: The function of the current node, which is used to determine the valid inputs.
In the case when node dependency is bypassed, the input is not required when parameter has default value,
and the input is set to None when parameter has no default value.
:type f: Callable
:return: A dictionary mapping each valid input name to its value.
:rtype: dict
"""
results = {}
signature = inspect.signature(f).parameters
for name, i in (node.inputs or {}).items():
if self._is_node_dependency_bypassed(i):
# If the parameter has default value, the input will not be set so that the default value will be used.
if signature.get(name) is not None and signature[name].default is not inspect.Parameter.empty:
continue
# If the parameter has no default value, the input will be set to None so that function will not fail.
else:
flow_logger.warning(
f"The node '{i.value}' referenced by the input '{name}' of the current node '{node.name}' "
"has been bypassed, and no default value is set. Will use 'None' as the value for this input."
)
results[name] = None
else:
results[name] = self._get_node_dependency_value(i)
return results
def complete_nodes(self, nodes_outputs: Mapping[str, Any]):
"""Marks nodes as completed with the mapping from node names to their outputs."""
self._completed_nodes_outputs.update(nodes_outputs)
def completed(self) -> bool:
"""Returns True if all nodes have been processed."""
return all(
node.name in self._completed_nodes_outputs or node.name in self._bypassed_nodes for node in self._nodes
)
def _is_node_ready(self, node: Node) -> bool:
"""Returns True if the node is ready to be executed."""
node_dependencies = [i for i in node.inputs.values()]
# Add activate conditions as node dependencies
if node.activate:
node_dependencies.append(node.activate.condition)
for node_dependency in node_dependencies:
if (
node_dependency.value_type == InputValueType.NODE_REFERENCE
and node_dependency.value not in self._completed_nodes_outputs
and node_dependency.value not in self._bypassed_nodes
):
return False
return True
def _is_node_bypassable(self, node: Node) -> bool:
"""Returns True if the node should be bypassed."""
# Bypass node if the activate condition is not met
if node.activate:
# If the node referenced by activate condition is bypassed, the current node should be bypassed
if self._is_node_dependency_bypassed(node.activate.condition):
flow_logger.info(
f"The node '{node.name}' will be bypassed because it depends on the node "
f"'{node.activate.condition.value}' which has already been bypassed in the activate config."
)
return True
# If a node has activate config, we will always use this config
# to determine whether the node should be bypassed.
activate_condition = InputAssignment.serialize(node.activate.condition)
if not self._is_condition_met(node.activate.condition, node.activate.condition_value):
flow_logger.info(
f"The node '{node.name}' will be bypassed because the activate condition is not met, "
f"i.e. '{activate_condition}' is not equal to '{node.activate.condition_value}'."
)
return True
else:
flow_logger.info(
f"The node '{node.name}' will be executed because the activate condition is met, "
f"i.e. '{activate_condition}' is equal to '{node.activate.condition_value}'."
)
return False
# Bypass node if all of its node reference dependencies are bypassed
node_dependencies = [i for i in node.inputs.values() if i.value_type == InputValueType.NODE_REFERENCE]
all_dependencies_bypassed = node_dependencies and all(
self._is_node_dependency_bypassed(dependency) for dependency in node_dependencies
)
if all_dependencies_bypassed:
node_dependencies_list = [dependency.value for dependency in node_dependencies]
flow_logger.info(
f"The node '{node.name}' will be bypassed because all nodes "
f"{node_dependencies_list} it depends on are bypassed."
)
return all_dependencies_bypassed
def _is_condition_met(self, condition: InputAssignment, condition_value) -> bool:
condition = self._get_node_dependency_value(condition)
return condition == condition_value
def _get_node_dependency_value(self, node_dependency: InputAssignment):
return _input_assignment_parser.parse_value(node_dependency, self._completed_nodes_outputs, self._flow_inputs)
def _is_node_dependency_bypassed(self, dependency: InputAssignment) -> bool:
"""Returns True if the node dependency is bypassed.
There are two types of the node dependency:
1. The inputs of the node
2. The activate condition of the node
"""
return dependency.value_type == InputValueType.NODE_REFERENCE and dependency.value in self._bypassed_nodes
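# Example: a minimal sketch of the activate-config bypass rule above, using a
# hypothetical flow snippet:
#
#   - name: summarize
#     activate:
#       when: ${classify.output}   # the activate condition (a node reference)
#       is: long_document          # the condition_value
#
# If 'classify' outputs anything other than "long_document", or was itself
# bypassed, 'summarize' is bypassed; nodes whose node-reference inputs all
# point at bypassed nodes are then bypassed in turn.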
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/__init__.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# flake8: noqa
from .flow_executor import FlowExecutor
from .flow_validator import FlowValidator
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_script_executor.py | import asyncio
import inspect
import uuid
from pathlib import Path
from typing import Any, Callable, Mapping, Optional
from promptflow._constants import LINE_NUMBER_KEY
from promptflow._core.operation_context import OperationContext
from promptflow._core.run_tracker import RunTracker
from promptflow._core.tool_meta_generator import PythonLoadError, load_python_module_from_file
from promptflow._core.tracer import _traced, Tracer
from promptflow._utils.dataclass_serializer import convert_eager_flow_output_to_dict
from promptflow._utils.logger_utils import logger
from promptflow._utils.tool_utils import function_to_interface
from promptflow.contracts.flow import Flow
from promptflow.contracts.run_mode import RunMode
from promptflow.executor._result import LineResult
from promptflow.storage import AbstractRunStorage
from promptflow.storage._run_storage import DefaultRunStorage
from .flow_executor import FlowExecutor
class ScriptExecutor(FlowExecutor):
def __init__(
self,
flow_file: Path,
entry: str,
connections: Optional[dict] = None,
working_dir: Optional[Path] = None,
*,
storage: Optional[AbstractRunStorage] = None,
):
logger.debug(f"Start initializing the executor with {flow_file}.")
self._flow_file = flow_file
# TODO: Refine the logic here
m = load_python_module_from_file(flow_file)
func: Callable = getattr(m, entry, None)
if func is None or not inspect.isfunction(func):
raise PythonLoadError(
message_format="Failed to load python function '{entry}' from file '{flow_file}'.",
entry=entry,
flow_file=flow_file,
)
# If the function is not decorated with trace, add trace for it.
if not hasattr(func, "__original_function"):
func = _traced(func)
inputs, _, _, _ = function_to_interface(func)
self._func = func
self._inputs = {k: v.to_flow_input_definition() for k, v in inputs.items()}
self._entry = entry
self._is_async = inspect.iscoroutinefunction(self._func)
self._connections = connections
self._working_dir = Flow._resolve_working_dir(flow_file, working_dir)
self._storage = storage or DefaultRunStorage()
self._flow_id = None
self._log_interval = 60
self._line_timeout_sec = 600
def exec_line(
self,
inputs: Mapping[str, Any],
index: Optional[int] = None,
run_id: Optional[str] = None,
**kwargs,
) -> LineResult:
operation_context = OperationContext.get_instance()
operation_context.run_mode = operation_context.get("run_mode", None) or RunMode.Test.name
run_id = run_id or str(uuid.uuid4())
line_run_id = run_id if index is None else f"{run_id}_{index}"
default_flow_id = "default_flow_id"
run_tracker = RunTracker(self._storage)
run_info = run_tracker.start_flow_run(
flow_id=default_flow_id,
root_run_id=run_id,
run_id=line_run_id,
parent_run_id=run_id,
inputs=inputs,
index=index,
)
        # The executor adds line_number to batch inputs when the original inputs lack one; that key should
        # be removed, so we only preserve the inputs that are declared in self._inputs.
inputs = {k: inputs[k] for k in self._inputs if k in inputs}
output = None
traces = []
try:
Tracer.start_tracing(line_run_id)
if self._is_async:
output = asyncio.run(self._func(**inputs))
else:
output = self._func(**inputs)
traces = Tracer.end_tracing(line_run_id)
            # Convert the output to a dict before storing it in the run info, since we will add the
            # 'line_number' key to it and therefore it must be a dict.
output_dict = convert_eager_flow_output_to_dict(output)
run_tracker.end_run(line_run_id, result=output_dict, traces=traces)
except Exception as e:
if not traces:
traces = Tracer.end_tracing(line_run_id)
run_tracker.end_run(line_run_id, ex=e, traces=traces)
finally:
run_tracker.persist_flow_run(run_info)
line_result = LineResult(output, {}, run_info, {})
# Return line result with index
if index is not None and isinstance(line_result.output, dict):
line_result.output[LINE_NUMBER_KEY] = index
return line_result
def enable_streaming_for_llm_flow(self, stream_required: Callable[[], bool]):
# TODO(2901157): check if eager mode should have streaming
return
def get_inputs_definition(self):
return self._inputs
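# Example: a minimal sketch of driving ScriptExecutor directly; the flow file
# path and entry name are hypothetical.
#
#   executor = ScriptExecutor(Path("flow.py"), entry="my_flow")
#   line_result = executor.exec_line({"question": "hi"}, index=0)
#   print(line_result.output, line_result.run_info.status)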
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_line_execution_process_pool.py | import contextvars
import multiprocessing
import os
import queue
import signal
import sys
import threading
import time
from datetime import datetime
from functools import partial
from logging import INFO
from multiprocessing import Manager, Queue
from multiprocessing.pool import ThreadPool
from typing import List, Optional, Union
import psutil
from promptflow._constants import LINE_NUMBER_KEY, LINE_TIMEOUT_SEC
from promptflow._core._errors import ProcessPoolError, UnexpectedError
from promptflow._core.operation_context import OperationContext
from promptflow._core.run_tracker import RunTracker
from promptflow._utils.dataclass_serializer import convert_eager_flow_output_to_dict
from promptflow._utils.exception_utils import ExceptionPresenter
from promptflow._utils.logger_utils import bulk_logger
from promptflow._utils.multimedia_utils import _process_recursively, persist_multimedia_data
from promptflow._utils.thread_utils import RepeatLogTimer
from promptflow._utils.utils import get_int_env_var, log_progress, set_context
from promptflow.contracts.multimedia import Image
from promptflow.contracts.run_info import FlowRunInfo
from promptflow.contracts.run_info import RunInfo as NodeRunInfo
from promptflow.contracts.run_info import Status
from promptflow.exceptions import ErrorTarget, PromptflowException
from promptflow.executor._errors import (
BatchExecutionTimeoutError,
LineExecutionTimeoutError,
ProcessCrashError,
ProcessInfoObtainedTimeout,
ProcessTerminatedTimeout,
)
from promptflow.executor._process_manager import ForkProcessManager, SpawnProcessManager
from promptflow.executor._result import LineResult
from promptflow.executor._script_executor import ScriptExecutor
from promptflow.executor.flow_executor import DEFAULT_CONCURRENCY_BULK, FlowExecutor
from promptflow.storage import AbstractRunStorage
def signal_handler(signum, frame):
signame = signal.Signals(signum).name
bulk_logger.info("Execution stopping. Handling signal %s (%s)", signame, signum)
try:
process = psutil.Process(os.getpid())
bulk_logger.info("Successfully terminated process with pid %s", process.pid)
process.terminate()
except Exception:
bulk_logger.warning("Error when handling execution stop signal", exc_info=True)
finally:
sys.exit(1)
class QueueRunStorage(AbstractRunStorage):
"""This storage persists run info by putting it into a queue."""
def __init__(self, queue: Queue):
self.queue = queue
def persist_node_run(self, run_info: NodeRunInfo):
self.queue.put(run_info)
def persist_flow_run(self, run_info: FlowRunInfo):
self.queue.put(run_info)
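# Example: a minimal sketch of how QueueRunStorage is typically wired up; the
# names here are placeholders. A worker process persists run info into the
# queue, and the main process drains it:
#
#   q = multiprocessing.Manager().Queue()
#   storage = QueueRunStorage(q)          # handed to the executor in the worker
#   run_info = q.get(timeout=1)           # consumed by the monitoring thread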
def format_current_process_info(process_name, pid, line_number: int):
return f"Process name({process_name})-Process id({pid})-Line number({line_number})"
def log_process_status(process_name, pid, line_number: int, is_completed=False, is_failed=False):
process_info = format_current_process_info(process_name, pid, line_number)
if is_completed:
bulk_logger.info(f"{process_info} completed.")
elif is_failed:
bulk_logger.info(f"{process_info} failed.")
else:
bulk_logger.info(f"{process_info} start execution.")
class LineExecutionProcessPool:
_DEFAULT_WORKER_COUNT = 4
_PROCESS_TERMINATED_TIMEOUT = 60
_PROCESS_INFO_OBTAINED_TIMEOUT = 60
def __init__(
self,
flow_executor: FlowExecutor,
nlines,
run_id,
output_dir,
batch_timeout_sec: Optional[int] = None,
line_timeout_sec: Optional[int] = None,
):
self._nlines = nlines
self._run_id = run_id
multiprocessing_start_method = os.environ.get("PF_BATCH_METHOD", multiprocessing.get_start_method())
sys_start_methods = multiprocessing.get_all_start_methods()
if multiprocessing_start_method not in sys_start_methods:
bulk_logger.warning(
f"Failed to set start method to '{multiprocessing_start_method}', "
f"start method {multiprocessing_start_method} is not in: {sys_start_methods}."
)
bulk_logger.info(f"Set start method to default {multiprocessing.get_start_method()}.")
multiprocessing_start_method = multiprocessing.get_start_method()
use_fork = multiprocessing_start_method in ["fork", "forkserver"]
self._flow_file = flow_executor._flow_file
self._connections = flow_executor._connections
self._working_dir = flow_executor._working_dir
self._use_fork = use_fork
if isinstance(flow_executor, ScriptExecutor):
self._storage = flow_executor._storage
else:
self._storage = flow_executor._run_tracker._storage
self._flow_id = flow_executor._flow_id
self._log_interval = flow_executor._log_interval
self._line_timeout_sec = line_timeout_sec or LINE_TIMEOUT_SEC
self._batch_timeout_sec = batch_timeout_sec
self._output_dir = output_dir
self._flow_create_kwargs = {
"flow_file": flow_executor._flow_file,
"connections": flow_executor._connections,
"working_dir": flow_executor._working_dir,
"entry": flow_executor._entry,
"line_timeout_sec": self._line_timeout_sec,
"raise_ex": False,
}
def __enter__(self):
manager = Manager()
self._processing_idx = manager.dict()
self._completed_idx = manager.dict()
self._task_queue = Queue()
self._n_process = self._determine_worker_count()
        # When using fork, we first spawn a sub process; a SemLock created in a fork context
        # (e.g. by multiprocessing.Queue()) can't be used in a spawn context. Since spawn does not share
        # memory, synchronization primitives created by fork cannot be used directly, causing the error:
        # "A SemLock created in a fork context is being shared with a process in a spawn context.
        # This is not supported".
        # So use multiprocessing.Manager().Queue() instead of multiprocessing.Queue().
        # Manager().Queue() operates through a manager server process, which passes messages between
        # processes without directly sharing memory state, making it safe to use in a spawn context.
self._input_queues = [manager.Queue() for _ in range(self._n_process)]
self._output_queues = [manager.Queue() for _ in range(self._n_process)]
self._control_signal_queue = manager.Queue()
self._process_info = manager.dict()
# when using fork, we first create a process with spawn method to establish a clean environment
# Then fork the subprocess in this environment to avoid some deadlock problems
common_kwargs = {
"input_queues": self._input_queues,
"output_queues": self._output_queues,
"process_info": self._process_info,
"process_target_func": _process_wrapper,
}
if self._use_fork:
# 1. Create input_queue, output_queue, control_signal_queue and _process_info in the main process.
# 2. Pass the above queue/dict as parameters to spawn and fork processes to transfer information
# between processes.
self._processes_manager = ForkProcessManager(
self._control_signal_queue,
self._flow_create_kwargs,
**common_kwargs,
)
else:
executor_creation_func = partial(FlowExecutor.create, **self._flow_create_kwargs)
# 1. Create input_queue, output_queue, and _process_info in the main process.
            # 2. Spawn _n_process sub-processes and pass the above queue/dict to them to transfer
            #    information between the main process and the sub-processes.
self._processes_manager = SpawnProcessManager(executor_creation_func, **common_kwargs)
self._processes_manager.start_processes()
monitor_pool = ThreadPool(self._n_process, initializer=set_context, initargs=(contextvars.copy_context(),))
self._monitor_pool = monitor_pool
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self._monitor_pool is not None:
self._monitor_pool.close()
self._monitor_pool.join()
def _get_process_info(self, index):
start_time = time.time()
while True:
try:
if time.time() - start_time > self._PROCESS_INFO_OBTAINED_TIMEOUT:
raise ProcessInfoObtainedTimeout(self._PROCESS_INFO_OBTAINED_TIMEOUT)
# Try to get process id and name from the process_info
process_id = self._process_info[index].process_id
process_name = self._process_info[index].process_name
return (index, process_id, process_name)
except KeyError:
                # If the process_info does not exist for the given index, it means the process is not
                # ready yet, so try again.
time.sleep(1)
continue
except Exception as e:
bulk_logger.warning(f"Unexpected error occurred while get process info. Exception: {e}")
def _ensure_process_terminated_within_timeout(self, process_id):
start_time = time.time()
while psutil.pid_exists(process_id):
if time.time() - start_time > self._PROCESS_TERMINATED_TIMEOUT:
raise ProcessTerminatedTimeout(self._PROCESS_TERMINATED_TIMEOUT)
time.sleep(1)
def _is_process_alive(self, process_id):
return psutil.pid_exists(process_id)
def _handle_output_queue_messages(self, output_queue: Queue, result_list):
try:
message = output_queue.get(timeout=1)
if isinstance(message, LineResult):
message = self._process_multimedia(message)
result_list.append(message)
return message
elif isinstance(message, FlowRunInfo):
self._storage.persist_flow_run(message)
return message
elif isinstance(message, NodeRunInfo):
self._storage.persist_node_run(message)
return message
except queue.Empty:
pass
return None
def _monitor_workers_and_process_tasks_in_thread(
self, task_queue: Queue, result_list: List[LineResult], index: int, input_queue: Queue, output_queue: Queue
):
index, process_id, process_name = self._get_process_info(index)
batch_start_time = datetime.utcnow()
# Entering the while loop requires two conditions:
# 1. The task queue is not empty, meaning there are lines yet to be executed.
# 2. The batch run has not reached the batch timeout limit.
while not self._batch_timeout_expired(batch_start_time):
try:
args = task_queue.get(timeout=1)
except queue.Empty:
break
# Put task into input_queue
input_queue.put(args)
inputs, line_number, run_id = args
self._processing_idx[line_number] = format_current_process_info(process_name, process_id, line_number)
log_process_status(process_name, process_id, line_number)
start_time = datetime.utcnow()
completed = False
crashed = False
returned_node_run_infos = {}
# Responsible for checking the output queue messages and processing them within a specified timeout period.
while not self._line_timeout_expired(start_time) and not self._batch_timeout_expired(batch_start_time):
# Monitor process aliveness.
crashed = not self._is_process_alive(process_id)
if crashed:
break
# Handle output queue message.
message = self._handle_output_queue_messages(output_queue, result_list)
if isinstance(message, LineResult):
completed = True
break
if isinstance(message, NodeRunInfo):
returned_node_run_infos[message.node] = message
# Handle line execution completed.
if completed:
self._completed_idx[line_number] = format_current_process_info(process_name, process_id, line_number)
log_process_status(process_name, process_id, line_number, is_completed=True)
# Handle line execution is not completed.
else:
ex = None
# Handle process crashed.
if crashed:
bulk_logger.warning(f"Process crashed while executing line {line_number}.")
ex = ProcessCrashError(line_number)
# Handle line execution timeout.
elif self._line_timeout_expired(start_time):
bulk_logger.warning(f"Line {line_number} timeout after {self._line_timeout_sec} seconds.")
ex = LineExecutionTimeoutError(line_number, self._line_timeout_sec)
# Handle batch execution timeout.
elif self._batch_timeout_expired(batch_start_time):
bulk_logger.warning(
f"Line {line_number} execution terminated due to the total "
f"batch run exceeding the batch timeout ({self._batch_timeout_sec}s)."
)
ex = BatchExecutionTimeoutError(line_number, self._batch_timeout_sec)
else:
                    # This branch should not be reached; log a warning in case it ever is.
msg = f"Unexpected error occurred while monitoring line execution at line {line_number}."
bulk_logger.warning(msg)
ex = UnexpectedError(msg)
result = self._generate_line_result_for_exception(
inputs,
run_id,
line_number,
self._flow_id,
start_time,
ex,
returned_node_run_infos,
)
result_list.append(result)
self._completed_idx[line_number] = format_current_process_info(process_name, process_id, line_number)
log_process_status(process_name, process_id, line_number, is_failed=True)
# If there are still tasks in the task_queue and the batch run does not exceed the batch timeout,
# restart a new process to execute the task.
run_finished = task_queue.empty() or self._batch_timeout_expired(batch_start_time)
if not run_finished:
self._processes_manager.restart_process(index)
                # We need to ensure the process has been killed before continuing to execute.
                # Otherwise the old process may receive a new task and then be killed mid-execution,
                # which would surface as a spurious 'ProcessCrashError'.
self._ensure_process_terminated_within_timeout(process_id)
index, process_id, process_name = self._get_process_info(index)
self._processing_idx.pop(line_number)
# End the process when the batch timeout is exceeded or when all lines have been executed.
self._processes_manager.end_process(index)
        # In fork mode, the main process and the sub spawn process communicate through _process_info.
        # We need to ensure the process has been killed before returning. Otherwise, the main process
        # may exit while the spawn process is still alive, which would be reported as a connection error.
self._ensure_process_terminated_within_timeout(process_id)
def _batch_timeout_expired(self, start_time: datetime) -> bool:
if self._batch_timeout_sec is None:
return False
return (datetime.utcnow() - start_time).total_seconds() > self._batch_timeout_sec + 10
def _line_timeout_expired(self, start_time: datetime) -> bool:
        # Here we add a few extra seconds for the following reasons:
        # 1. Around the deadline, several timeout messages from exec_line may still be in flight.
        # 2. It may take time to create a worker, so the actual timeout may need to be longer.
return (datetime.utcnow() - start_time).total_seconds() > self._line_timeout_sec + 10
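    # Example: with _line_timeout_sec = 600, a line is only treated as timed out
    # once (utcnow() - start_time) exceeds 610 seconds; the 10-second grace
    # window absorbs in-flight timeout messages and worker startup latency.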
def _process_multimedia(self, result: LineResult) -> LineResult:
"""Replace multimedia data in line result with string place holder to prevent OOM
and persist multimedia data in output when batch running."""
if not self._output_dir:
return result
self._process_multimedia_in_flow_run(result.run_info)
for node_name, node_run_info in result.node_run_infos.items():
result.node_run_infos[node_name] = self._process_multimedia_in_node_run(node_run_info)
result.output = persist_multimedia_data(result.output, self._output_dir)
return result
def _process_multimedia_in_run_info(self, run_info: Union[FlowRunInfo, NodeRunInfo]):
# Persist and convert images in inputs to path dictionaries.
# This replaces any image objects with their corresponding file path dictionaries.
if run_info.inputs:
run_info.inputs = self._persist_and_convert_images_to_path_dicts(run_info.inputs)
# Persist and convert images in output to path dictionaries.
# This replaces any image objects with their corresponding file path dictionaries.
if run_info.output:
serialized_output = self._persist_and_convert_images_to_path_dicts(run_info.output)
run_info.output = serialized_output
run_info.result = None
# Persist and convert images in api_calls to path dictionaries.
# The `inplace=True` parameter is used here to ensure that the original list structure holding generator outputs
# is maintained. This allows us to keep tracking the list as it dynamically changes when the generator is
# consumed. It is crucial to process the api_calls list in place to avoid losing the reference to the list that
# holds the generator items, which is essential for tracing generator execution.
if run_info.api_calls:
run_info.api_calls = self._persist_and_convert_images_to_path_dicts(run_info.api_calls, inplace=True)
return run_info
def _process_multimedia_in_flow_run(self, run_info: FlowRunInfo):
self._process_multimedia_in_run_info(run_info)
def _process_multimedia_in_node_run(self, run_info: NodeRunInfo):
run_info = self._process_multimedia_in_run_info(run_info)
return run_info
def _persist_and_convert_images_to_path_dicts(self, value, inplace=False):
serialization_funcs = {Image: partial(Image.serialize, **{"encoder": None})}
return _process_recursively(value, process_funcs=serialization_funcs, inplace=inplace)
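    # Example: after serialization, an Image object is replaced by a path
    # dictionary along the lines of {"data:image/png;path": "xxx.png"}; the
    # exact keys depend on the Image serializer, so this shape is illustrative.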
def _generate_line_result_for_exception(
self,
inputs,
run_id,
line_number,
flow_id,
start_time,
ex,
node_run_infos={},
) -> LineResult:
bulk_logger.error(f"Line {line_number}, Process {os.getpid()} failed with exception: {ex}")
run_info = FlowRunInfo(
run_id=f"{run_id}_{line_number}",
status=Status.Failed,
error=ExceptionPresenter.create(ex).to_dict(include_debug_info=True),
inputs=inputs,
output=None,
metrics=None,
request=None,
parent_run_id=run_id,
root_run_id=run_id,
source_run_id=None,
flow_id=flow_id,
start_time=start_time,
end_time=datetime.utcnow(),
index=line_number,
)
result = LineResult(
output={},
aggregation_inputs={},
run_info=run_info,
node_run_infos=node_run_infos,
)
        # TODO: There is a corner case where the run info is persisted in the subprocess on timeout,
# while we also persist the run info here. This may cause duplicate run info in the storage.
# We need to find a way to avoid this.
self._storage.persist_flow_run(result.run_info)
return result
def run(self, batch_inputs):
for index, inputs in batch_inputs:
self._task_queue.put(
(
inputs,
index,
self._run_id,
)
)
result_list = []
run_start_time = datetime.utcnow()
with RepeatLogTimer(
interval_seconds=self._log_interval,
logger=bulk_logger,
level=INFO,
log_message_function=self._generate_thread_status_messages,
args=(
self._monitor_pool,
self._nlines,
),
):
try:
args_list = [
(
self._task_queue, # Shared task queue for all sub processes to read the input data.
result_list, # Line result list of the batch run.
i, # Index of the sub process.
# Specific input queue for sub process, used to send input data to it.
self._input_queues[i],
# Specific output queue for the sub process, used to receive results from it.
self._output_queues[i],
)
for i in range(self._n_process)
]
                # The variable 'async_result' here is not the actual result of the batch run
                # but an AsyncResult object that can be used to check whether the execution is finished.
                # The actual results of the batch run are stored in 'result_list'.
# Create _n_process monitoring threads, mainly used to assign tasks and receive line result.
# When task_queue is empty, end the process.
# When line execution timeout or process crash, restart the process.
async_result = self._monitor_pool.starmap_async(
self._monitor_workers_and_process_tasks_in_thread, args_list
)
try:
# Only log when the number of results changes to avoid duplicate logging.
last_log_count = 0
# Wait for batch run to complete or KeyboardInterrupt
while not async_result.ready():
current_result_count = len(result_list)
if current_result_count != last_log_count:
log_progress(
run_start_time=run_start_time,
logger=bulk_logger,
count=len(result_list),
total_count=self._nlines,
)
last_log_count = current_result_count
# Check every 1 second
async_result.wait(1)
# To ensure exceptions in thread-pool calls are propagated to the main process for proper handling
# The exceptions raised will be re-raised by the get() method.
# Related link:
# https://docs.python.org/3/library/multiprocessing.html#multiprocessing.pool.AsyncResult
async_result.get()
except KeyboardInterrupt:
raise
except PromptflowException:
raise
except Exception as e:
bulk_logger.error(f"Process {os.getpid()} failed with exception: {e}")
raise ProcessPoolError(
message_format=f"Process {os.getpid()} failed with exception: {e}",
target=ErrorTarget.EXECUTOR,
) from e
return result_list
def _generate_thread_status_messages(self, pool: ThreadPool, total_count: int):
msgs = []
active_threads = sum(thread.is_alive() for thread in pool._pool)
msgs.append(f"[Process Pool] [Active processes: {active_threads} / {len(pool._pool)}]")
processing_lines_copy = self._processing_idx.copy()
completed_lines_copy = self._completed_idx.copy()
msgs.append(
f"[Lines] [Finished: {len(completed_lines_copy)}] [Processing: {len(processing_lines_copy)}] "
f"[Pending: {total_count - len(processing_lines_copy) - len(completed_lines_copy)}]"
)
lines = []
for idx, thread_name in sorted(processing_lines_copy.items()):
lines.append(f"line {idx} ({thread_name})")
if len(lines) > 0:
msgs.append("Processing Lines: " + ", ".join(lines) + ".")
return msgs
def _determine_worker_count(self):
worker_count = get_int_env_var("PF_WORKER_COUNT")
        # Starting a new process in non-fork mode requires allocating memory.
        # Calculate the maximum number of processes based on available memory to avoid memory bursting.
estimated_available_worker_count = get_available_max_worker_count() if not self._use_fork else None
# If the environment variable PF_WORKER_COUNT exists and valid, use the value as the worker_count.
if worker_count is not None and worker_count > 0:
self._log_set_worker_count(worker_count, estimated_available_worker_count)
return worker_count
# If the environment variable PF_WORKER_COUNT is not set or invalid, take the minimum value among the
# factors: default_worker_count, row_count and estimated_worker_count_based_on_memory_usage
factors = {
"default_worker_count": self._DEFAULT_WORKER_COUNT,
"row_count": self._nlines,
"estimated_worker_count_based_on_memory_usage": estimated_available_worker_count,
}
valid_factors = {k: v for k, v in factors.items() if v is not None and v > 0}
# Take the minimum value as the result
worker_count = min(valid_factors.values())
bulk_logger.info(
f"Set process count to {worker_count} by taking the minimum value among the factors of {valid_factors}."
)
return worker_count
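    # Example: a worked instance of the selection above (numbers hypothetical).
    # With PF_WORKER_COUNT unset, default_worker_count = 4, row_count = 10 and
    # estimated_worker_count_based_on_memory_usage = 3, the pool size is
    # min(4, 10, 3) == 3.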
def _log_set_worker_count(self, worker_count, estimated_available_worker_count):
bulk_logger.info(f"Set process count to {worker_count} with the environment variable 'PF_WORKER_COUNT'.")
if estimated_available_worker_count is not None and estimated_available_worker_count < worker_count:
bulk_logger.warning(
f"The current process count ({worker_count}) is larger than recommended process count "
f"({estimated_available_worker_count}) that estimated by system available memory. This may "
f"cause memory exhaustion"
)
def _exec_line(executor: FlowExecutor, output_queue: Queue, *, inputs: dict, run_id, index: int):
try:
line_result = executor.exec_line(
inputs=inputs,
run_id=run_id,
index=index,
node_concurrency=DEFAULT_CONCURRENCY_BULK,
)
if line_result is not None:
# For eager flow, the output may be a dataclass which is not picklable, we need to convert it to dict.
if not isinstance(line_result.output, dict):
line_result.output = convert_eager_flow_output_to_dict(line_result.output)
line_result.output.pop(LINE_NUMBER_KEY, None)
        # TODO: Put the serialized line result into the queue to catch serialization errors beforehand.
        # Otherwise it might cause the process to hang, e.g., a line fails because its output is not serializable.
if line_result is not None and line_result.run_info.status == Status.Failed:
line_result.output = {}
return line_result
except Exception as e:
bulk_logger.error(f"Line {index}, Process {os.getpid()} failed with exception: {e}")
flow_id = executor._flow_id
line_run_id = run_id if index is None else f"{run_id}_{index}"
# If line execution failed before start, there is no flow information in the run_tracker.
# So we call start_flow_run before handling exception to make sure the run_tracker has flow info.
if isinstance(executor, ScriptExecutor):
run_tracker = RunTracker(executor._storage)
else:
run_tracker = executor._run_tracker
run_tracker.start_flow_run(flow_id, run_id, line_run_id, run_id)
run_info = run_tracker.end_run(f"{run_id}_{index}", ex=e)
output_queue.put(run_info)
result = LineResult(
output={},
aggregation_inputs={},
run_info=run_info,
node_run_infos={},
)
return result
def _process_wrapper(
executor_creation_func,
input_queue: Queue,
output_queue: Queue,
log_context_initialization_func,
operation_contexts_dict: dict,
):
if threading.current_thread() is threading.main_thread():
signal.signal(signal.SIGINT, signal_handler)
else:
bulk_logger.info("Current thread is not main thread, skip signal handler registration in batch process pool.")
OperationContext.get_instance().update(operation_contexts_dict) # Update the operation context for the new process.
if log_context_initialization_func:
with log_context_initialization_func():
exec_line_for_queue(executor_creation_func, input_queue, output_queue)
else:
exec_line_for_queue(executor_creation_func, input_queue, output_queue)
def create_executor_fork(*, flow_executor: FlowExecutor, storage: AbstractRunStorage):
if isinstance(flow_executor, ScriptExecutor):
return ScriptExecutor(
flow_file=flow_executor._flow_file,
entry=flow_executor._entry,
connections=flow_executor._connections,
working_dir=flow_executor._working_dir,
storage=storage,
)
else:
run_tracker = RunTracker(run_storage=storage)
return FlowExecutor(
flow=flow_executor._flow,
connections=flow_executor._connections,
run_tracker=run_tracker,
cache_manager=flow_executor._cache_manager,
loaded_tools=flow_executor._loaded_tools,
raise_ex=False,
line_timeout_sec=flow_executor._line_timeout_sec,
)
def exec_line_for_queue(executor_creation_func, input_queue: Queue, output_queue: Queue):
run_storage = QueueRunStorage(output_queue)
executor: FlowExecutor = executor_creation_func(storage=run_storage)
while True:
try:
inputs, line_number, run_id = input_queue.get(timeout=1)
result = _exec_line(
executor=executor,
output_queue=output_queue,
inputs=inputs,
run_id=run_id,
index=line_number,
)
output_queue.put(result)
except queue.Empty:
            # Do nothing until the input_queue has content or the process is killed
# TODO: Exit the process more gracefully.
pass
def get_available_max_worker_count():
pid = os.getpid()
mem_info = psutil.virtual_memory()
available_memory = mem_info.available / (1024 * 1024) # in MB
process = psutil.Process(pid)
process_memory_info = process.memory_info()
process_memory = process_memory_info.rss / (1024 * 1024) # in MB
estimated_available_worker_count = int(available_memory // process_memory)
if estimated_available_worker_count < 1:
# TODO: For the case of vector db, Optimize execution logic
# 1. Let the main process not consume memory because it does not actually invoke
# 2. When the degree of parallelism is 1, main process executes the task directly and not
# create the child process
bulk_logger.warning(
f"Current system's available memory is {available_memory}MB, less than the memory "
f"{process_memory}MB required by the process. The maximum available worker count is 1."
)
estimated_available_worker_count = 1
else:
bulk_logger.info(
f"Current system's available memory is {available_memory}MB, "
f"memory consumption of current process is {process_memory}MB, "
f"estimated available worker count is {available_memory}/{process_memory} "
f"= {estimated_available_worker_count}"
)
return estimated_available_worker_count
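# Example: a worked instance of the estimate above (numbers hypothetical).
# With 8192 MB of available system memory and a current process RSS of
# 1024 MB, the result is int(8192 // 1024) == 8 workers.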
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_tool_resolver.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import copy
import inspect
import types
from dataclasses import dataclass
from functools import partial
from pathlib import Path
from typing import Callable, List, Optional
from promptflow._core._errors import InvalidSource
from promptflow._core.connection_manager import ConnectionManager
from promptflow._core.tool import STREAMING_OPTION_PARAMETER_ATTR
from promptflow._core.tools_manager import BuiltinsManager, ToolLoader, connection_type_to_api_mapping
from promptflow._utils.multimedia_utils import create_image, load_multimedia_data_recursively
from promptflow._utils.tool_utils import get_inputs_for_prompt_template, get_prompt_param_name_from_func
from promptflow._utils.yaml_utils import load_yaml
from promptflow.contracts.flow import InputAssignment, InputValueType, Node, ToolSourceType
from promptflow.contracts.tool import ConnectionType, Tool, ToolType, ValueType
from promptflow.contracts.types import AssistantDefinition, PromptTemplate
from promptflow.exceptions import ErrorTarget, PromptflowException, UserErrorException
from promptflow.executor._errors import (
ConnectionNotFound,
EmptyLLMApiMapping,
InvalidConnectionType,
InvalidCustomLLMTool,
NodeInputValidationError,
ResolveToolError,
ValueTypeUnresolved,
)
@dataclass
class ResolvedTool:
node: Node
definition: Tool
callable: Callable
init_args: dict
class ToolResolver:
def __init__(
self,
working_dir: Path,
connections: Optional[dict] = None,
package_tool_keys: Optional[List[str]] = None,
):
try:
# Import openai and aoai for llm tool
from promptflow.tools import aoai, openai # noqa: F401
except ImportError:
pass
self._tool_loader = ToolLoader(working_dir, package_tool_keys=package_tool_keys)
self._working_dir = working_dir
self._connection_manager = ConnectionManager(connections)
@classmethod
def start_resolver(
cls, working_dir: Path, connections: Optional[dict] = None, package_tool_keys: Optional[List[str]] = None
):
resolver = cls(working_dir, connections, package_tool_keys)
resolver._activate_in_context(force=True)
return resolver
def _convert_to_connection_value(self, k: str, v: InputAssignment, node: Node, conn_types: List[ValueType]):
connection_value = self._connection_manager.get(v.value)
if not connection_value:
raise ConnectionNotFound(f"Connection {v.value} not found for node {node.name!r} input {k!r}.")
# Check if type matched
if not any(type(connection_value).__name__ == typ for typ in conn_types):
msg = (
f"Input '{k}' for node '{node.name}' of type {type(connection_value).__name__!r}"
f" is not supported, valid types {conn_types}."
)
raise NodeInputValidationError(message=msg)
return connection_value
def _convert_to_custom_strong_type_connection_value(
self, k: str, v: InputAssignment, node: Node, tool: Tool, conn_types: List[str], module: types.ModuleType
):
if not conn_types:
msg = f"Input '{k}' for node '{node.name}' has invalid types: {conn_types}."
raise NodeInputValidationError(message=msg)
connection_value = self._connection_manager.get(v.value)
if not connection_value:
raise ConnectionNotFound(f"Connection {v.value} not found for node {node.name!r} input {k!r}.")
custom_defined_connection_class_name = conn_types[0]
if node.source.type == ToolSourceType.Package:
module = tool.module
return connection_value._convert_to_custom_strong_type(
module=module, to_class=custom_defined_connection_class_name
)
def _convert_to_assistant_definition(self, assistant_definition_path: str, input_name: str, node_name: str):
if assistant_definition_path is None or not (self._working_dir / assistant_definition_path).is_file():
raise InvalidSource(
target=ErrorTarget.EXECUTOR,
message_format="Input '{input_name}' for node '{node_name}' of value '{source_path}' "
"is not a valid path.",
input_name=input_name,
source_path=assistant_definition_path,
node_name=node_name,
)
file = self._working_dir / assistant_definition_path
with open(file, "r", encoding="utf-8") as file:
assistant_definition = load_yaml(file)
return AssistantDefinition.deserialize(assistant_definition)
def _convert_node_literal_input_types(self, node: Node, tool: Tool, module: types.ModuleType = None):
updated_inputs = {
k: v
for k, v in node.inputs.items()
if (v.value is not None and v.value != "") or v.value_type != InputValueType.LITERAL
}
for k, v in updated_inputs.items():
if v.value_type != InputValueType.LITERAL:
continue
tool_input = tool.inputs.get(k)
if tool_input is None: # For kwargs input, tool_input is None.
continue
value_type = tool_input.type[0]
updated_inputs[k] = InputAssignment(value=v.value, value_type=InputValueType.LITERAL)
if ConnectionType.is_connection_class_name(value_type):
if tool_input.custom_type:
updated_inputs[k].value = self._convert_to_custom_strong_type_connection_value(
k, v, node, tool, tool_input.custom_type, module=module
)
else:
updated_inputs[k].value = self._convert_to_connection_value(k, v, node, tool_input.type)
elif value_type == ValueType.IMAGE:
try:
updated_inputs[k].value = create_image(v.value)
except Exception as e:
error_type_and_message = f"({e.__class__.__name__}) {e}"
raise NodeInputValidationError(
message_format="Failed to load image for input '{key}': {error_type_and_message}",
key=k,
error_type_and_message=error_type_and_message,
target=ErrorTarget.EXECUTOR,
) from e
elif value_type == ValueType.ASSISTANT_DEFINITION:
try:
updated_inputs[k].value = self._convert_to_assistant_definition(v.value, k, node.name)
except Exception as e:
error_type_and_message = f"({e.__class__.__name__}) {e}"
raise NodeInputValidationError(
message_format="Failed to load assistant definition from input '{key}': "
"{error_type_and_message}",
key=k,
error_type_and_message=error_type_and_message,
target=ErrorTarget.EXECUTOR,
) from e
elif isinstance(value_type, ValueType):
try:
updated_inputs[k].value = value_type.parse(v.value)
except Exception as e:
raise NodeInputValidationError(
message_format="Input '{key}' for node '{node_name}' of value '{value}' is not "
"type {value_type}.",
key=k,
node_name=node.name,
value=v.value,
value_type=value_type.value,
target=ErrorTarget.EXECUTOR,
) from e
try:
updated_inputs[k].value = load_multimedia_data_recursively(updated_inputs[k].value)
except Exception as e:
error_type_and_message = f"({e.__class__.__name__}) {e}"
raise NodeInputValidationError(
message_format="Failed to load image for input '{key}': {error_type_and_message}",
key=k,
error_type_and_message=error_type_and_message,
target=ErrorTarget.EXECUTOR,
) from e
else:
# The value type is in ValueType enum or is connection type. null connection has been handled before.
raise ValueTypeUnresolved(
f"Unresolved input type {value_type!r}, please check if it is supported in current version.",
target=ErrorTarget.EXECUTOR,
)
updated_node = copy.deepcopy(node)
updated_node.inputs = updated_inputs
return updated_node
def resolve_tool_by_node(self, node: Node, convert_input_types=True) -> ResolvedTool:
try:
if node.source is None:
raise UserErrorException(f"Node {node.name} does not have source defined.")
if node.type is ToolType.PYTHON:
if node.source.type == ToolSourceType.Package:
return self._resolve_package_node(node, convert_input_types=convert_input_types)
elif node.source.type == ToolSourceType.Code:
return self._resolve_script_node(node, convert_input_types=convert_input_types)
raise NotImplementedError(f"Tool source type {node.source.type} for python tool is not supported yet.")
elif node.type is ToolType.PROMPT:
return self._resolve_prompt_node(node)
elif node.type is ToolType.LLM:
return self._resolve_llm_node(node, convert_input_types=convert_input_types)
elif node.type is ToolType.CUSTOM_LLM:
if node.source.type == ToolSourceType.PackageWithPrompt:
resolved_tool = self._resolve_package_node(node, convert_input_types=convert_input_types)
return self._integrate_prompt_in_package_node(resolved_tool)
raise NotImplementedError(
f"Tool source type {node.source.type} for custom_llm tool is not supported yet."
)
else:
raise NotImplementedError(f"Tool type {node.type} is not supported yet.")
except Exception as e:
if isinstance(e, PromptflowException) and e.target != ErrorTarget.UNKNOWN:
raise ResolveToolError(node_name=node.name, target=e.target, module=e.module) from e
raise ResolveToolError(node_name=node.name) from e
def _load_source_content(self, node: Node) -> str:
source = node.source
        # If is_file returns True, the path points to an existing file, so we don't need a separate existence check.
if source is None or source.path is None or not (self._working_dir / source.path).is_file():
raise InvalidSource(
target=ErrorTarget.EXECUTOR,
message_format="Node source path '{source_path}' is invalid on node '{node_name}'.",
source_path=source.path if source is not None else None,
node_name=node.name,
)
file = self._working_dir / source.path
return file.read_text(encoding="utf-8")
def _validate_duplicated_inputs(self, prompt_tpl_inputs: list, tool_params: list, msg: str):
duplicated_inputs = set(prompt_tpl_inputs) & set(tool_params)
if duplicated_inputs:
raise NodeInputValidationError(
message=msg.format(duplicated_inputs=duplicated_inputs),
target=ErrorTarget.EXECUTOR,
)
def _load_images_for_prompt_tpl(self, prompt_tpl_inputs_mapping: dict, node_inputs: dict):
for input_name, input in prompt_tpl_inputs_mapping.items():
if ValueType.IMAGE in input.type and input_name in node_inputs:
if node_inputs[input_name].value_type == InputValueType.LITERAL:
node_inputs[input_name].value = create_image(node_inputs[input_name].value)
return node_inputs
def _resolve_prompt_node(self, node: Node) -> ResolvedTool:
prompt_tpl = self._load_source_content(node)
prompt_tpl_inputs_mapping = get_inputs_for_prompt_template(prompt_tpl)
from promptflow.tools.template_rendering import render_template_jinja2
params = inspect.signature(render_template_jinja2).parameters
param_names = [name for name, param in params.items() if param.kind != inspect.Parameter.VAR_KEYWORD]
msg = (
f"Invalid inputs {{duplicated_inputs}} in prompt template of node {node.name}. "
f"These inputs are duplicated with the reserved parameters of prompt tool."
)
self._validate_duplicated_inputs(prompt_tpl_inputs_mapping.keys(), param_names, msg)
node.inputs = self._load_images_for_prompt_tpl(prompt_tpl_inputs_mapping, node.inputs)
callable = partial(render_template_jinja2, template=prompt_tpl)
return ResolvedTool(node=node, definition=None, callable=callable, init_args={})
@staticmethod
def _remove_init_args(node_inputs: dict, init_args: dict):
for k in init_args:
if k in node_inputs:
del node_inputs[k]
def _get_node_connection(self, node: Node):
connection = self._connection_manager.get(node.connection)
if connection is None:
raise ConnectionNotFound(
message=f"Connection {node.connection!r} not found, available connection keys "
f"{self._connection_manager._connections.keys()}.",
target=ErrorTarget.EXECUTOR,
)
return connection
def _resolve_llm_node(self, node: Node, convert_input_types=False) -> ResolvedTool:
connection = self._get_node_connection(node)
if not node.provider:
if not connection_type_to_api_mapping:
raise EmptyLLMApiMapping()
# If provider is not specified, try to resolve it from connection type
connection_type = type(connection).__name__
if connection_type not in connection_type_to_api_mapping:
raise InvalidConnectionType(
message_format="Connection type {conn_type} is not supported for LLM.",
conn_type=connection_type,
)
node.provider = connection_type_to_api_mapping[connection_type]
tool: Tool = self._tool_loader.load_tool_for_llm_node(node)
key, connection = self._resolve_llm_connection_to_inputs(node, tool)
updated_node = copy.deepcopy(node)
updated_node.inputs[key] = InputAssignment(value=connection, value_type=InputValueType.LITERAL)
if convert_input_types:
updated_node = self._convert_node_literal_input_types(updated_node, tool)
prompt_tpl = self._load_source_content(node)
prompt_tpl_inputs_mapping = get_inputs_for_prompt_template(prompt_tpl)
msg = (
f"Invalid inputs {{duplicated_inputs}} in prompt template of node {node.name}. "
f"These inputs are duplicated with the parameters of {node.provider}.{node.api}."
)
self._validate_duplicated_inputs(prompt_tpl_inputs_mapping.keys(), tool.inputs.keys(), msg)
updated_node.inputs = self._load_images_for_prompt_tpl(prompt_tpl_inputs_mapping, updated_node.inputs)
api_func, init_args = BuiltinsManager._load_package_tool(
tool.name, tool.module, tool.class_name, tool.function, updated_node.inputs
)
self._remove_init_args(updated_node.inputs, init_args)
prompt_tpl_param_name = get_prompt_param_name_from_func(api_func)
api_func = partial(api_func, **{prompt_tpl_param_name: prompt_tpl}) if prompt_tpl_param_name else api_func
return ResolvedTool(updated_node, tool, api_func, init_args)
    def _resolve_llm_connection_to_inputs(self, node: Node, tool: Tool) -> tuple:
connection = self._get_node_connection(node)
for key, input in tool.inputs.items():
if ConnectionType.is_connection_class_name(input.type[0]):
if type(connection).__name__ not in input.type:
msg = (
f"Invalid connection '{node.connection}' type {type(connection).__name__!r} "
f"for node '{node.name}', valid types {input.type}."
)
raise InvalidConnectionType(message=msg)
return key, connection
raise InvalidConnectionType(
message_format="Connection type can not be resolved for tool {tool_name}", tool_name=tool.name
)
def _resolve_script_node(self, node: Node, convert_input_types=False) -> ResolvedTool:
m, tool = self._tool_loader.load_tool_for_script_node(node)
        # We only want to load the script tool module once.
        # Reloading the same module changes the identity of its classes, which can break isinstance() checks.
        # This matters for connection class checks; for instance, a user tool script may contain:
        #     isinstance(conn, MyCustomConnection)
        # A custom-defined script tool and a custom-defined strong-type connection live in the same module.
        # The module is first loaded in the line above when loading the tool, and we need it again when
        # converting a custom connection to its strong type during input type conversion.
        # To avoid reloading, pass the loaded module to _convert_node_literal_input_types as an argument.
if convert_input_types:
node = self._convert_node_literal_input_types(node, tool, m)
callable, init_args = BuiltinsManager._load_tool_from_module(
m, tool.name, tool.module, tool.class_name, tool.function, node.inputs
)
self._remove_init_args(node.inputs, init_args)
return ResolvedTool(node=node, definition=tool, callable=callable, init_args=init_args)
def _resolve_package_node(self, node: Node, convert_input_types=False) -> ResolvedTool:
tool: Tool = self._tool_loader.load_tool_for_package_node(node)
updated_node = copy.deepcopy(node)
if convert_input_types:
updated_node = self._convert_node_literal_input_types(updated_node, tool)
callable, init_args = BuiltinsManager._load_package_tool(
tool.name, tool.module, tool.class_name, tool.function, updated_node.inputs
)
self._remove_init_args(updated_node.inputs, init_args)
return ResolvedTool(node=updated_node, definition=tool, callable=callable, init_args=init_args)
def _integrate_prompt_in_package_node(self, resolved_tool: ResolvedTool):
node = resolved_tool.node
prompt_tpl = PromptTemplate(self._load_source_content(node))
prompt_tpl_inputs_mapping = get_inputs_for_prompt_template(prompt_tpl)
msg = (
f"Invalid inputs {{duplicated_inputs}} in prompt template of node {node.name}. "
f"These inputs are duplicated with the inputs of custom llm tool."
)
self._validate_duplicated_inputs(prompt_tpl_inputs_mapping.keys(), resolved_tool.definition.inputs.keys(), msg)
node.inputs = self._load_images_for_prompt_tpl(prompt_tpl_inputs_mapping, node.inputs)
callable = resolved_tool.callable
prompt_tpl_param_name = get_prompt_param_name_from_func(callable)
if prompt_tpl_param_name is None:
raise InvalidCustomLLMTool(
f"Invalid Custom LLM tool {resolved_tool.definition.name}: "
f"function {callable.__name__} is missing a prompt template argument.",
target=ErrorTarget.EXECUTOR,
)
resolved_tool.callable = partial(callable, **{prompt_tpl_param_name: prompt_tpl})
# Copy the attributes to make sure they are still available after partial.
attributes_to_set = [STREAMING_OPTION_PARAMETER_ATTR]
for attr in attributes_to_set:
attr_val = getattr(callable, attr, None)
if attr_val is not None:
setattr(resolved_tool.callable, attr, attr_val)
return resolved_tool
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_errors.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from jinja2 import TemplateSyntaxError
from promptflow._utils.exception_utils import ExceptionPresenter, infer_error_code_from_class, remove_suffix
from promptflow.exceptions import (
ErrorTarget,
PromptflowException,
SystemErrorException,
UserErrorException,
ValidationException,
)
class InvalidCustomLLMTool(ValidationException):
"""Exception raised when package tool definition is wrong."""
pass
class ValueTypeUnresolved(ValidationException):
pass
class ToolValidationError(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class InvalidRequest(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class ConnectionNotFound(InvalidRequest):
pass
class InvalidBulkTestRequest(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class InvalidFlowRequest(ValidationException):
def __init__(
self,
target: ErrorTarget = ErrorTarget.EXECUTOR,
**kwargs,
):
super().__init__(
target=target,
**kwargs,
)
class NodeInputValidationError(InvalidFlowRequest):
pass
class DuplicateNodeName(InvalidFlowRequest):
pass
class EmptyOutputReference(InvalidFlowRequest):
pass
class OutputReferenceNotFound(InvalidFlowRequest):
pass
class InputReferenceNotFound(InvalidFlowRequest):
pass
class InputNotFound(InvalidFlowRequest):
pass
class InvalidAggregationInput(SystemErrorException):
pass
class InputNotFoundFromAncestorNodeOutput(SystemErrorException):
pass
class NoNodeExecutedError(SystemErrorException):
pass
class InputTypeError(InvalidFlowRequest):
pass
class InputParseError(InvalidFlowRequest):
pass
class InvalidConnectionType(InvalidFlowRequest):
pass
class NodeReferenceNotFound(InvalidFlowRequest):
pass
class NodeCircularDependency(InvalidFlowRequest):
pass
class InvalidNodeReference(InvalidFlowRequest):
pass
class NodeReferenceError(UserErrorException):
"""Exception raised when node reference not found or unsupported"""
pass
class UnsupportedReference(NodeReferenceError):
pass
class InvalidReferenceProperty(NodeReferenceError):
pass
class OutputReferenceNotExist(NodeReferenceError):
pass
class NodeOutputNotFound(UserErrorException):
pass
class SingleNodeValidationError(UserErrorException):
pass
class LineExecutionTimeoutError(UserErrorException):
"""Exception raised when single line execution timeout"""
def __init__(self, line_number, timeout):
super().__init__(
message_format="Line {line_number} execution timeout for exceeding {timeout} seconds",
line_number=line_number,
timeout=timeout,
target=ErrorTarget.EXECUTOR,
)
class BatchExecutionTimeoutError(UserErrorException):
"""Exception raised when batch timeout is exceeded"""
def __init__(self, line_number, timeout):
super().__init__(
message_format=(
"Line {line_number} execution terminated due to the "
"total batch run exceeding the batch timeout ({timeout}s)."
),
line_number=line_number,
timeout=timeout,
target=ErrorTarget.BATCH,
)
class ProcessCrashError(UserErrorException):
"""Exception raised when process crashed."""
def __init__(self, line_number):
super().__init__(message=f"Process crashed while executing line {line_number},", target=ErrorTarget.EXECUTOR)
class ProcessTerminatedTimeout(SystemErrorException):
"""Exception raised when process not terminated within a period of time."""
def __init__(self, timeout):
super().__init__(message=f"Process has not terminated after {timeout} seconds", target=ErrorTarget.EXECUTOR)
class ProcessInfoObtainedTimeout(SystemErrorException):
"""Exception raised when process info not obtained within a period of time."""
def __init__(self, timeout):
super().__init__(message=f"Failed to get process info after {timeout} seconds", target=ErrorTarget.EXECUTOR)
class EmptyLLMApiMapping(UserErrorException):
"""Exception raised when connection_type_to_api_mapping is empty and llm node provider can't be inferred"""
def __init__(self):
super().__init__(
message="LLM api mapping is empty, please ensure 'promptflow-tools' package has been installed.",
target=ErrorTarget.EXECUTOR,
)
class ResolveToolError(PromptflowException):
"""Exception raised when tool load failed.
It is used to append the name of the failed node to the error message to improve the user experience.
It simply wraps the error thrown by the Resolve Tool phase.
    It has the same additional_info and error_codes as the inner error.
"""
def __init__(self, *, node_name: str, target: ErrorTarget = ErrorTarget.EXECUTOR, module: str = None):
self._node_name = node_name
super().__init__(target=target, module=module)
@property
def message(self):
if self.inner_exception:
error_type_and_message = f"({self.inner_exception.__class__.__name__}) {self.inner_exception}"
if isinstance(self.inner_exception, TemplateSyntaxError):
error_type_and_message = (
f"Jinja parsing failed at line {self.inner_exception.lineno}: {error_type_and_message}"
)
return remove_suffix(self._message, ".") + f": {error_type_and_message}"
return self._message
@property
def message_format(self):
return "Tool load failed in '{node_name}'."
@property
def message_parameters(self):
return {"node_name": self._node_name}
@property
def additional_info(self):
"""Get additional info from innererror when the innererror is PromptflowException"""
if isinstance(self.inner_exception, PromptflowException):
return self.inner_exception.additional_info
return None
@property
def error_codes(self):
"""The hierarchy of the error codes.
We follow the "Microsoft REST API Guidelines" to define error codes in a hierarchy style.
See the below link for details:
https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses
        Because ResolveToolError has no classification of its own,
        its error_codes respect those of the inner error.
"""
if self.inner_exception:
return ExceptionPresenter.create(self.inner_exception).error_codes
return [infer_error_code_from_class(SystemErrorException), self.__class__.__name__]
class UnsupportedAssistantToolType(ValidationException):
pass
class InvalidFlowFileError(UserErrorException):
pass
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_tool_invoker.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from promptflow._core.tool import ToolInvoker
class DefaultToolInvoker(ToolInvoker):
def invoke_tool(self, f, *args, **kwargs):
return f(*args, **kwargs) # Do nothing
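# Example: a minimal sketch of the pass-through behavior; the tool function is
# hypothetical.
#
#   invoker = DefaultToolInvoker()
#   invoker.invoke_tool(lambda name: f"Hello, {name}!", "world")  # -> "Hello, world!"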
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_docstring_parser.py | import docutils.nodes
from docutils.core import publish_doctree
class DocstringParser:
@staticmethod
def parse(docstring: str):
doctree = publish_doctree(docstring)
description = doctree[0].astext()
params = {}
for field in doctree.traverse(docutils.nodes.field):
field_name = field[0].astext()
field_body = field[1].astext()
if field_name.startswith("param"):
param_name = field_name.split(" ")[1]
if param_name not in params:
params[param_name] = {}
params[param_name]["description"] = field_body
if field_name.startswith("type"):
param_name = field_name.split(" ")[1]
if param_name not in params:
params[param_name] = {}
params[param_name]["type"] = field_body
return description, params
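# Example: a minimal sketch of parsing a reST-style docstring; the sample
# docstring is hypothetical.
#
#   description, params = DocstringParser.parse(
#       "Add two numbers.\n\n:param a: The first addend.\n:type a: int\n"
#   )
#   # description == "Add two numbers."
#   # params == {"a": {"description": "The first addend.", "type": "int"}}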
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_flow_nodes_scheduler.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import asyncio
import contextvars
import inspect
import threading
from concurrent import futures
from concurrent.futures import Future, ThreadPoolExecutor
from typing import Dict, List, Optional, Tuple
from promptflow._core.flow_execution_context import FlowExecutionContext
from promptflow._core.tools_manager import ToolsManager
from promptflow._utils.logger_utils import flow_logger
from promptflow._utils.utils import set_context
from promptflow.contracts.flow import Node
from promptflow.executor._dag_manager import DAGManager
from promptflow.executor._errors import LineExecutionTimeoutError, NoNodeExecutedError
RUN_FLOW_NODES_LINEARLY = 1
DEFAULT_CONCURRENCY_BULK = 2
DEFAULT_CONCURRENCY_FLOW = 16
class FlowNodesScheduler:
def __init__(
self,
tools_manager: ToolsManager,
inputs: Dict,
nodes_from_invoker: List[Node],
node_concurrency: int,
context: FlowExecutionContext,
) -> None:
self._tools_manager = tools_manager
self._future_to_node: Dict[Future, Node] = {}
self._node_concurrency = min(node_concurrency, DEFAULT_CONCURRENCY_FLOW)
flow_logger.info(f"Start to run {len(nodes_from_invoker)} nodes with concurrency level {node_concurrency}.")
self._dag_manager = DAGManager(nodes_from_invoker, inputs)
self._context = context
def wait_within_timeout(self, execution_event: threading.Event, timeout: int):
flow_logger.info(f"Timeout task is scheduled to wait for {timeout} seconds.")
signal = execution_event.wait(timeout=timeout)
if signal:
flow_logger.info("Timeout task is cancelled because the execution is finished.")
else:
flow_logger.warning(f"Timeout task timeouted after waiting for {timeout} seconds.")
def execute(
self,
line_timeout_sec: Optional[int] = None,
) -> Tuple[dict, dict]:
parent_context = contextvars.copy_context()
with ThreadPoolExecutor(
max_workers=self._node_concurrency, initializer=set_context, initargs=(parent_context,)
) as executor:
self._execute_nodes(executor)
timeout_task = None
event = threading.Event()
if line_timeout_sec is not None:
timeout_task = executor.submit(self.wait_within_timeout, event, line_timeout_sec)
try:
while not self._dag_manager.completed():
if not self._future_to_node:
raise NoNodeExecutedError("No nodes are ready for execution, but the flow is not completed.")
tasks_to_wait = list(self._future_to_node.keys())
if timeout_task is not None:
tasks_to_wait.append(timeout_task)
completed_futures_with_wait, _ = futures.wait(tasks_to_wait, return_when=futures.FIRST_COMPLETED)
completed_futures = [f for f in completed_futures_with_wait if f in self._future_to_node]
self._dag_manager.complete_nodes(self._collect_outputs(completed_futures))
for each_future in completed_futures:
del self._future_to_node[each_future]
if timeout_task and timeout_task.done():
raise LineExecutionTimeoutError(self._context._line_number, line_timeout_sec)
self._execute_nodes(executor)
except Exception as e:
err_msg = "Flow execution has failed."
if isinstance(e, LineExecutionTimeoutError):
err_msg = f"Line execution timeout after {line_timeout_sec} seconds."
self._context.cancel_node_runs(err_msg)
node_names = ",".join(node.name for node in self._future_to_node.values())
flow_logger.error(f"{err_msg} Cancelling all running nodes: {node_names}.")
for unfinished_future in self._future_to_node.keys():
# We can't cancel running tasks here, only pending tasks could be cancelled.
unfinished_future.cancel()
                # Even if we raise an exception here, we still need to wait for all running jobs to finish before exiting.
raise e
finally:
# Cancel timeout task no matter the execution is finished or failed.
event.set()
for node in self._dag_manager.bypassed_nodes:
self._dag_manager.completed_nodes_outputs[node] = None
return self._dag_manager.completed_nodes_outputs, self._dag_manager.bypassed_nodes
def _execute_nodes(self, executor: ThreadPoolExecutor):
# Skip nodes and update node run info until there are no nodes to bypass
nodes_to_bypass = self._dag_manager.pop_bypassable_nodes()
while nodes_to_bypass:
for node in nodes_to_bypass:
self._context.bypass_node(node)
nodes_to_bypass = self._dag_manager.pop_bypassable_nodes()
# Submit nodes that are ready to run
nodes_to_exec = self._dag_manager.pop_ready_nodes()
if nodes_to_exec:
self._submit_nodes(executor, nodes_to_exec)
def _collect_outputs(self, completed_futures: List[Future]):
completed_nodes_outputs = {}
for each_future in completed_futures:
each_node_result = each_future.result()
each_node = self._future_to_node[each_future]
completed_nodes_outputs[each_node.name] = each_node_result
return completed_nodes_outputs
def _submit_nodes(self, executor: ThreadPoolExecutor, nodes):
for each_node in nodes:
future = executor.submit(self._exec_single_node_in_thread, (each_node, self._dag_manager))
self._future_to_node[future] = each_node
def _exec_single_node_in_thread(self, args: Tuple[Node, DAGManager]):
node, dag_manager = args
        # We are using the same run tracker and cache manager for all threads, which may not be thread safe.
        # But we have been doing this for a long time in the bulk run scenario, and it works well.
context = self._context
f = self._tools_manager.get_tool(node.name)
kwargs = dag_manager.get_node_valid_inputs(node, f)
if inspect.iscoroutinefunction(f):
# TODO: Run async functions in flow level event loop
result = asyncio.run(context.invoke_tool_async(node, f, kwargs=kwargs))
else:
result = context.invoke_tool(node, f, kwargs=kwargs)
return result
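# Illustrative sketch (names other than the scheduler's own API are
# placeholders): the flow executor constructs a scheduler per line and drains it
# via execute(), e.g.:
#
#     scheduler = FlowNodesScheduler(
#         tools_manager=tools_manager,      # ToolsManager with resolved tools
#         inputs={"question": "hi"},        # flow-level inputs for this line
#         nodes_from_invoker=nodes,         # nodes to schedule
#         node_concurrency=DEFAULT_CONCURRENCY_FLOW,
#         context=context,                  # FlowExecutionContext for this line
#     )
#     outputs, bypassed_nodes = scheduler.execute(line_timeout_sec=600)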
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_assistant_tool_invoker.py | import os
from dataclasses import dataclass
from functools import partial
from pathlib import Path
from typing import Callable, Dict, Optional
from promptflow.contracts.flow import InputAssignment, Node, ToolSource
from promptflow.contracts.tool import ToolType
from promptflow.exceptions import ErrorTarget
from promptflow.executor._docstring_parser import DocstringParser
from promptflow.executor._errors import UnsupportedAssistantToolType
from promptflow.executor._tool_resolver import ToolResolver
@dataclass
class AssistantTool:
name: str
openai_definition: dict
func: Callable
class AssistantToolInvoker:
def __init__(self, working_dir: Optional[Path] = None):
self._working_dir = working_dir or Path(os.getcwd())
self._assistant_tools: Dict[str, AssistantTool] = {}
@classmethod
def init(cls, tools: list, working_dir: Optional[Path] = None):
invoker = cls(working_dir=working_dir)
invoker._load_tools(tools)
return invoker
def _load_tools(self, tools: list):
for tool in tools:
if tool["type"] in ("code_interpreter", "retrieval"):
self._assistant_tools[tool["type"]] = AssistantTool(
name=tool["type"], openai_definition=tool, func=None
)
elif tool["type"] == "function":
function_tool = self._load_tool_as_function(tool)
self._assistant_tools[function_tool.name] = function_tool
else:
raise UnsupportedAssistantToolType(
message_format="Unsupported assistant tool type: {tool_type}",
tool_type=tool["type"],
target=ErrorTarget.EXECUTOR,
)
def _load_tool_as_function(self, tool: dict):
tool_resolver = ToolResolver(self._working_dir)
node, predefined_inputs = self._generate_node_for_tool(tool)
resolved_tool = tool_resolver.resolve_tool_by_node(node, convert_input_types=False)
func_name = resolved_tool.definition.function
definition = self._generate_tool_definition(
func_name, resolved_tool.definition.description, predefined_inputs
)
if resolved_tool.node.inputs:
inputs = {name: value.value for name, value in resolved_tool.node.inputs.items()}
func = partial(resolved_tool.callable, **inputs)
else:
func = resolved_tool.callable
return AssistantTool(name=func_name, openai_definition=definition, func=func)
def _generate_node_for_tool(self, tool: dict):
predefined_inputs = {}
for input_name, value in tool.get("predefined_inputs", {}).items():
predefined_inputs[input_name] = InputAssignment.deserialize(value)
node = Node(
name="assistant_node",
tool="assistant_tool",
inputs=predefined_inputs,
source=ToolSource.deserialize(tool["source"]) if "source" in tool else None,
type=ToolType.PYTHON if "tool_type" in tool and tool["tool_type"] == "python" else None,
)
return node, list(predefined_inputs.keys())
def invoke_tool(self, func_name, kwargs):
return self._assistant_tools[func_name].func(**kwargs)
def to_openai_tools(self):
return [tool.openai_definition for tool in self._assistant_tools.values()]
def _generate_tool_definition(self, func_name: str, description: str, predefined_inputs: list) -> dict:
to_openai_type = {
"str": "string", "int": "number", "float": "number", "bool": "boolean", "list": "array", "dict": "object"
}
description, params = DocstringParser.parse(description)
        for input_name in predefined_inputs:
            if input_name in params:
                params.pop(input_name)
        for param in params.values():
            param["type"] = to_openai_type.get(param["type"], param["type"])
return {
"type": "function",
"function": {
"name": func_name,
"description": description,
"parameters": {
"type": "object",
"properties": params,
"required": list(params.keys())
}
}
}
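# Illustrative sketch of the tool payload shape accepted by _load_tools (the
# file path, input names and function name below are placeholders):
#
#     tools = [
#         {"type": "code_interpreter"},
#         {
#             "type": "function",
#             "tool_type": "python",
#             "source": {"type": "code", "path": "my_tool.py"},
#             "predefined_inputs": {"connection": "my_connection"},
#         },
#     ]
#     invoker = AssistantToolInvoker.init(tools, working_dir=Path("."))
#     openai_tools = invoker.to_openai_tools()   # definitions for the OpenAI API
#     result = invoker.invoke_tool("my_func", {"query": "hello"})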
| 0 |
promptflow_repo/promptflow/src/promptflow/promptflow | promptflow_repo/promptflow/src/promptflow/promptflow/executor/_process_manager.py | import multiprocessing
import queue
import signal
from dataclasses import dataclass
from enum import Enum
from functools import partial
from multiprocessing import Queue
from typing import List
import psutil
from promptflow._core.operation_context import OperationContext
from promptflow._utils.logger_utils import LogContext, bulk_logger
from promptflow.executor.flow_executor import FlowExecutor
@dataclass
class ProcessInfo:
index: int
process_id: str
process_name: str
class ProcessControlSignal(str, Enum):
START = "start"
RESTART = "restart"
END = "end"
class AbstractProcessManager:
"""
AbstractProcessManager is a base class for managing processes.
:param input_queues: Queues for providing input data to the processes.
:type input_queues: List[multiprocessing.Queue]
:param output_queues: Queues for receiving execution results of the processes.
:type output_queues: List[multiprocessing.Queue]
:param process_info: Dictionary to store information about the processes.
:type process_info: dict
:param process_target_func: The target function that the processes will execute.
:param raise_ex: Flag to determine whether to raise exceptions or not.
:type raise_ex: bool
"""
def __init__(
self,
input_queues: List[Queue],
output_queues: List[Queue],
process_info: dict,
process_target_func,
*args, **kwargs,
) -> None:
self._input_queues = input_queues
self._output_queues = output_queues
self._process_info = process_info
self._process_target_func = process_target_func
current_log_context = LogContext.get_current()
self._log_context_initialization_func = current_log_context.get_initializer() if current_log_context else None
self._current_operation_context = OperationContext.get_instance().get_context_dict()
def new_process(self, i):
"""
Create and start a new process.
:param i: Index of the new process to start.
:type i: int
"""
raise NotImplementedError("AbstractProcessManager is an abstract class, no implementation for new_process.")
def restart_process(self, i):
"""
        Restarts a specified process.
:param i: Index of the process to restart.
:type i: int
"""
raise NotImplementedError("AbstractProcessManager is an abstract class, no implementation for restart_process.")
def end_process(self, i):
"""
Terminates a specified process.
:param i: Index of the process to terminate.
:type i: int
"""
raise NotImplementedError("AbstractProcessManager is an abstract class, no implementation for end_process.")
class SpawnProcessManager(AbstractProcessManager):
"""
SpawnProcessManager extends AbstractProcessManager to specifically manage processes using the 'spawn' start method.
:param executor_creation_func: Function to create an executor for each process.
:param args: Additional positional arguments for the AbstractProcessManager.
:param kwargs: Additional keyword arguments for the AbstractProcessManager.
"""
def __init__(self, executor_creation_func, *args, **kwargs):
super().__init__(*args, **kwargs)
self._executor_creation_func = executor_creation_func
self.context = multiprocessing.get_context("spawn")
def start_processes(self):
"""
Initiates processes.
"""
for i in range(len(self._input_queues)):
self.new_process(i)
def new_process(self, i):
"""
Create and start a new process using the 'spawn' context.
:param i: Index of the input and output queue for the new process.
:type i: int
"""
process = self.context.Process(
target=self._process_target_func,
args=(
self._executor_creation_func,
self._input_queues[i],
self._output_queues[i],
self._log_context_initialization_func,
self._current_operation_context,
),
            # Set the process as a daemon process so that it is automatically terminated and its system resources released
# when the main process exits.
daemon=True,
)
process.start()
try:
self._process_info[i] = ProcessInfo(
index=i,
process_id=process.pid,
process_name=process.name,
)
except Exception as e:
bulk_logger.warning(
f"Unexpected error occurred while creating ProcessInfo for index {i} and process id {process.pid}. "
f"Exception: {e}"
)
return process
def restart_process(self, i):
"""
Restarts a specified process by first terminating it then creating a new one.
:param i: Index of the process to restart.
:type i: int
"""
self.end_process(i)
self.new_process(i)
def end_process(self, i):
"""
Terminates a specified process.
:param i: Index of the process to terminate.
:type i: int
"""
try:
pid = self._process_info[i].process_id
process = psutil.Process(pid)
process.terminate()
process.wait()
self._process_info.pop(i)
except psutil.NoSuchProcess:
bulk_logger.warning(f"Process {pid} had been terminated")
except Exception as e:
bulk_logger.warning(
f"Unexpected error occurred while end process for index {i} and process id {process.pid}. "
f"Exception: {e}"
)
class ForkProcessManager(AbstractProcessManager):
    """
ForkProcessManager extends AbstractProcessManager to manage processes using the 'fork' method
in a spawned process.
:param control_signal_queue: A queue for controlling signals to manage process operations.
:type control_signal_queue: multiprocessing.Queue
:param flow_file: The path to the flow file.
:type flow_file: Path
:param connections: The connections to be used for the flow.
:type connections: dict
:param working_dir: The working directory to be used for the flow.
:type working_dir: str
:param args: Additional positional arguments for the AbstractProcessManager.
:param kwargs: Additional keyword arguments for the AbstractProcessManager.
"""
'''
def __init__(self, control_signal_queue: Queue, flow_create_kwargs, *args, **kwargs):
super().__init__(*args, **kwargs)
self._control_signal_queue = control_signal_queue
self._flow_create_kwargs = flow_create_kwargs
def start_processes(self):
"""
        Initiates a process with the "spawn" method to establish a clean environment.
"""
context = multiprocessing.get_context("spawn")
process = context.Process(
target=create_spawned_fork_process_manager,
args=(
self._log_context_initialization_func,
self._current_operation_context,
self._input_queues,
self._output_queues,
self._control_signal_queue,
self._flow_create_kwargs,
self._process_info,
self._process_target_func,
),
)
process.start()
def restart_process(self, i):
"""
Sends a signal to restart a specific process.
:param i: Index of the process to restart.
:type i: int
"""
self._control_signal_queue.put((ProcessControlSignal.RESTART, i))
def end_process(self, i):
"""
Sends a signal to terminate a specific process.
:param i: Index of the process to terminate.
:type i: int
"""
self._control_signal_queue.put((ProcessControlSignal.END, i))
def new_process(self, i):
"""
Sends a signal to start a new process.
:param i: Index of the new process to start.
:type i: int
"""
self._control_signal_queue.put((ProcessControlSignal.START, i))
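# The control protocol between this manager (in the main process) and the
# spawned manager loop is a plain (signal, index) tuple on the shared queue;
# for example, restart_process(2) amounts to:
#
#     control_signal_queue.put((ProcessControlSignal.RESTART, 2))
#
# The spawned side dequeues these in create_spawned_fork_process_manager below
# and dispatches them through SpawnedForkProcessManager.handle_signals.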
class SpawnedForkProcessManager(AbstractProcessManager):
"""
SpawnedForkProcessManager extends AbstractProcessManager to manage processes using 'fork' method
in a spawned process.
:param control_signal_queue: A queue for controlling signals to manage process operations.
:type control_signal_queue: multiprocessing.Queue
:param executor_creation_func: Function to create an executor for each process.
:type executor_creation_func: Callable
:param args: Additional positional arguments for the AbstractProcessManager.
:param kwargs: Additional keyword arguments for the AbstractProcessManager.
"""
def __init__(
self,
log_context_initialization_func,
current_operation_context,
control_signal_queue,
executor_creation_func,
*args,
**kwargs,
):
super().__init__(*args, **kwargs)
self._log_context_initialization_func = log_context_initialization_func
self._current_operation_context = current_operation_context
self._control_signal_queue = control_signal_queue
self._executor_creation_func = executor_creation_func
self.context = multiprocessing.get_context("fork")
def new_process(self, i):
"""
Create and start a new process using the 'fork' context.
:param i: Index of the input and output queue for the new process.
:type i: int
"""
process = self.context.Process(
target=self._process_target_func,
args=(
self._executor_creation_func,
self._input_queues[i],
self._output_queues[i],
self._log_context_initialization_func,
self._current_operation_context,
),
daemon=True,
)
process.start()
try:
self._process_info[i] = ProcessInfo(
index=i,
process_id=process.pid,
process_name=process.name,
)
except Exception as e:
bulk_logger.warning(
f"Unexpected error occurred while creating ProcessInfo for index {i} and process id {process.pid}. "
f"Exception: {e}"
)
return process
def end_process(self, i):
"""
Terminates a specified process.
:param i: Index of the process to terminate.
:type i: int
"""
try:
pid = self._process_info[i].process_id
process = psutil.Process(pid)
process.terminate()
process.wait()
self._process_info.pop(i)
except psutil.NoSuchProcess:
bulk_logger.warning(f"Process {pid} had been terminated")
except Exception as e:
bulk_logger.warning(
f"Unexpected error occurred while end process for index {i} and process id {process.pid}. "
f"Exception: {e}"
)
def restart_process(self, i):
"""
Restarts a specified process by first terminating it then creating a new one.
:param i: Index of the process to restart.
:type i: int
"""
self.end_process(i)
self.new_process(i)
def handle_signals(self, control_signal, i):
"""
Handles control signals for processes, performing actions such as starting, ending,
or restarting them based on the received signal.
:param control_signal: The control signal indicating the desired action. It can be 'start', 'end', or 'restart'.
:type control_signal: str
:param i: Index of the process to control.
:type i: int
"""
if control_signal == ProcessControlSignal.END:
self.end_process(i)
elif control_signal == ProcessControlSignal.RESTART:
self.restart_process(i)
elif control_signal == ProcessControlSignal.START:
self.new_process(i)
def create_spawned_fork_process_manager(
log_context_initialization_func,
current_operation_context,
input_queues,
output_queues,
control_signal_queue,
flow_create_kwargs,
process_info,
process_target_func,
):
"""
Manages the creation, termination, and signaling of processes using the 'fork' context.
"""
# Set up signal handling for process interruption.
from promptflow.executor._line_execution_process_pool import create_executor_fork, signal_handler
signal.signal(signal.SIGINT, signal_handler)
# Create flow executor.
executor = FlowExecutor.create(**flow_create_kwargs)
# When using fork, we use this method to create the executor to avoid reloading the flow
# which will introduce a lot more memory.
executor_creation_func = partial(create_executor_fork, flow_executor=executor)
manager = SpawnedForkProcessManager(
log_context_initialization_func,
current_operation_context,
control_signal_queue,
executor_creation_func,
input_queues,
output_queues,
process_info,
process_target_func,
)
# Initialize processes.
for i in range(len(input_queues)):
manager.new_process(i)
# Main loop to handle control signals and manage process lifecycle.
while True:
all_processes_stopped = True
try:
process_info_list = process_info.items()
except Exception as e:
bulk_logger.warning(f"Unexpected error occurred while get process info list. Exception: {e}")
break
for _, info in list(process_info_list):
pid = info.process_id
# Check if at least one process is alive.
if psutil.pid_exists(pid):
process = psutil.Process(pid)
if process.status() != "zombie":
all_processes_stopped = False
else:
                    # If wait() is not called, the child process may become a zombie process,
                    # and psutil.pid_exists(pid) will always be true, which would cause the
                    # spawned process to never exit.
process.wait()
# If all fork child processes exit, exit the loop.
if all_processes_stopped:
break
try:
control_signal, i = control_signal_queue.get(timeout=1)
manager.handle_signals(control_signal, i)
except queue.Empty:
            # The control signal queue is empty; keep polling until all child processes exit.
pass
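# Illustrative note: callers are expected to pick a manager based on the
# available start method — SpawnProcessManager where only "spawn" is supported
# (e.g. Windows), ForkProcessManager where "fork" is available so that child
# workers can reuse the already-created flow executor instead of reloading the
# flow from disk. The actual selection is assumed to live in the line execution
# process pool that drives these managers.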
| 0 |
promptflow_repo/promptflow/src/promptflow | promptflow_repo/promptflow/src/promptflow/tests/_constants.py | from pathlib import Path
PROMOTFLOW_ROOT = Path(__file__).parent.parent
RUNTIME_TEST_CONFIGS_ROOT = Path(PROMOTFLOW_ROOT / "tests/test_configs/runtime")
EXECUTOR_REQUESTS_ROOT = Path(PROMOTFLOW_ROOT / "tests/test_configs/executor_api_requests")
MODEL_ROOT = Path(PROMOTFLOW_ROOT / "tests/test_configs/e2e_samples")
CONNECTION_FILE = (PROMOTFLOW_ROOT / "connections.json").resolve().absolute().as_posix()
ENV_FILE = (PROMOTFLOW_ROOT / ".env").resolve().absolute().as_posix()
# below constants are used for pfazure and global config tests
DEFAULT_SUBSCRIPTION_ID = "96aede12-2f73-41cb-b983-6d11a904839b"
DEFAULT_RESOURCE_GROUP_NAME = "promptflow"
DEFAULT_WORKSPACE_NAME = "promptflow-eastus2euap"
DEFAULT_RUNTIME_NAME = "test-runtime-ci"
DEFAULT_REGISTRY_NAME = "promptflow-preview"
| 0 |
promptflow_repo/promptflow/src/promptflow | promptflow_repo/promptflow/src/promptflow/tests/conftest.py | import importlib
import json
import os
import tempfile
from multiprocessing import Lock
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from _constants import (
CONNECTION_FILE,
DEFAULT_REGISTRY_NAME,
DEFAULT_RESOURCE_GROUP_NAME,
DEFAULT_RUNTIME_NAME,
DEFAULT_SUBSCRIPTION_ID,
DEFAULT_WORKSPACE_NAME,
ENV_FILE,
)
from _pytest.monkeypatch import MonkeyPatch
from dotenv import load_dotenv
from filelock import FileLock
from pytest_mock import MockerFixture
from sdk_cli_azure_test.recording_utilities import SanitizedValues, is_replay
from promptflow._cli._utils import AzureMLWorkspaceTriad
from promptflow._constants import PROMPTFLOW_CONNECTIONS
from promptflow._core.connection_manager import ConnectionManager
from promptflow._core.openai_injector import inject_openai_api
from promptflow._utils.context_utils import _change_working_dir
from promptflow.connections import AzureOpenAIConnection
load_dotenv()
@pytest.fixture(scope="session", autouse=True)
def modify_work_directory():
os.chdir(Path(__file__).parent.parent.absolute())
@pytest.fixture(autouse=True, scope="session")
def mock_build_info():
"""Mock BUILD_INFO environment variable in pytest.
BUILD_INFO is set as environment variable in docker image, but not in local test.
So we need to mock it in test senario. Rule - build_number is set as
ci-<BUILD_BUILDNUMBER> in CI pipeline, and set as local in local dev test."""
if "BUILD_INFO" not in os.environ:
m = MonkeyPatch()
build_number = os.environ.get("BUILD_BUILDNUMBER", "")
buid_info = {"build_number": f"ci-{build_number}" if build_number else "local-pytest"}
m.setenv("BUILD_INFO", json.dumps(buid_info))
yield m
@pytest.fixture(autouse=True, scope="session")
def inject_api():
"""Inject OpenAI API during test session.
    AOAI calls in promptflow should involve trace logging and header injection. Inject
    the function into the API calls in the test scenario."""
inject_openai_api()
@pytest.fixture
def dev_connections() -> dict:
with open(CONNECTION_FILE, "r") as f:
return json.load(f)
@pytest.fixture
def use_secrets_config_file(mocker: MockerFixture):
mocker.patch.dict(os.environ, {PROMPTFLOW_CONNECTIONS: CONNECTION_FILE})
@pytest.fixture
def env_with_secrets_config_file():
_lock = Lock()
with _lock:
with open(ENV_FILE, "w") as f:
f.write(f"{PROMPTFLOW_CONNECTIONS}={CONNECTION_FILE}\n")
yield ENV_FILE
if os.path.exists(ENV_FILE):
os.remove(ENV_FILE)
@pytest.fixture
def azure_open_ai_connection() -> AzureOpenAIConnection:
return ConnectionManager().get("azure_open_ai_connection")
@pytest.fixture
def temp_output_dir() -> str:
with tempfile.TemporaryDirectory() as temp_dir:
yield temp_dir
@pytest.fixture
def prepare_symbolic_flow() -> str:
flows_dir = Path(__file__).parent / "test_configs" / "flows"
target_folder = flows_dir / "web_classification_with_symbolic"
source_folder = flows_dir / "web_classification"
with _change_working_dir(target_folder):
for file_name in os.listdir(source_folder):
if not Path(file_name).exists():
os.symlink(source_folder / file_name, file_name)
return target_folder
@pytest.fixture(scope="session")
def install_custom_tool_pkg():
# The tests could be running in parallel. Use a lock to prevent race conditions.
lock = FileLock("custom_tool_pkg_installation.lock")
with lock:
try:
import my_tool_package # noqa: F401
except ImportError:
import subprocess
import sys
subprocess.check_call([sys.executable, "-m", "pip", "install", "test-custom-tools==0.0.2"])
@pytest.fixture
def mocked_ws_triple() -> AzureMLWorkspaceTriad:
return AzureMLWorkspaceTriad("mock_subscription_id", "mock_resource_group", "mock_workspace_name")
@pytest.fixture(scope="session")
def mock_list_func():
"""Mock function object for dynamic list testing."""
def my_list_func(prefix: str = "", size: int = 10, **kwargs):
return [
{
"value": "fig0",
"display_value": "My_fig0",
"hyperlink": "https://www.bing.com/search?q=fig0",
"description": "this is 0 item",
},
{
"value": "kiwi1",
"display_value": "My_kiwi1",
"hyperlink": "https://www.bing.com/search?q=kiwi1",
"description": "this is 1 item",
},
]
return my_list_func
@pytest.fixture(scope="session")
def mock_module_with_list_func(mock_list_func):
"""Mock module object for dynamic list testing."""
mock_module = MagicMock()
mock_module.my_list_func = mock_list_func
mock_module.my_field = 1
original_import_module = importlib.import_module # Save this to prevent recursion
with patch.object(importlib, "import_module") as mock_import:
def side_effect(module_name, *args, **kwargs):
if module_name == "my_tool_package.tools.tool_with_dynamic_list_input":
return mock_module
else:
return original_import_module(module_name, *args, **kwargs)
mock_import.side_effect = side_effect
yield
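# Illustration of the routing above: only the targeted module name is served
# from the mock; every other import falls through to the saved original.
#
#     importlib.import_module("my_tool_package.tools.tool_with_dynamic_list_input")
#     # -> mock_module
#     importlib.import_module("json")
#     # -> the real json module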
# below fixtures are used for pfazure and global config tests
@pytest.fixture(scope="session")
def subscription_id() -> str:
if is_replay():
return SanitizedValues.SUBSCRIPTION_ID
else:
return os.getenv("PROMPT_FLOW_SUBSCRIPTION_ID", DEFAULT_SUBSCRIPTION_ID)
@pytest.fixture(scope="session")
def resource_group_name() -> str:
if is_replay():
return SanitizedValues.RESOURCE_GROUP_NAME
else:
return os.getenv("PROMPT_FLOW_RESOURCE_GROUP_NAME", DEFAULT_RESOURCE_GROUP_NAME)
@pytest.fixture(scope="session")
def workspace_name() -> str:
if is_replay():
return SanitizedValues.WORKSPACE_NAME
else:
return os.getenv("PROMPT_FLOW_WORKSPACE_NAME", DEFAULT_WORKSPACE_NAME)
@pytest.fixture(scope="session")
def runtime_name() -> str:
return os.getenv("PROMPT_FLOW_RUNTIME_NAME", DEFAULT_RUNTIME_NAME)
@pytest.fixture(scope="session")
def registry_name() -> str:
return os.getenv("PROMPT_FLOW_REGISTRY_NAME", DEFAULT_REGISTRY_NAME)
@pytest.fixture
def enable_logger_propagate():
"""This is for test cases that need to check the log output."""
from promptflow._utils.logger_utils import get_cli_sdk_logger
logger = get_cli_sdk_logger()
original_value = logger.propagate
logger.propagate = True
yield
logger.propagate = original_value
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/utils.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import contextlib
import getpass
import json
from typing import Any, Dict, List
from unittest import mock
import werkzeug
from flask.testing import FlaskClient
@contextlib.contextmanager
def check_activity_end_telemetry(
*,
expected_activities: List[Dict[str, Any]] = None,
**kwargs,
):
if expected_activities is None and kwargs:
expected_activities = [kwargs]
with mock.patch("promptflow._sdk._telemetry.activity.log_activity_end") as mock_telemetry:
yield
actual_activities = [call.args[0] for call in mock_telemetry.call_args_list]
assert mock_telemetry.call_count == len(expected_activities), (
f"telemetry should not be called {len(expected_activities)} times but got {mock_telemetry.call_count}:\n"
f"{json.dumps(actual_activities, indent=2)}\n"
)
default_expected_call = {
"first_call": True,
"activity_type": "PublicApi",
"completion_status": "Success",
"user_agent": f"promptflow-sdk/0.0.1 Werkzeug/{werkzeug.__version__} local_pfs/0.0.1",
}
for i, expected_activity in enumerate(expected_activities):
temp = default_expected_call.copy()
temp.update(expected_activity)
expected_activity = temp
for key, expected_value in expected_activity.items():
value = actual_activities[i][key]
assert (
value == expected_value
), f"{key} mismatch in {i+1}th call: expect {expected_value} but got {value}"
class PFSOperations:
CONNECTION_URL_PREFIX = "/v1.0/Connections"
RUN_URL_PREFIX = "/v1.0/Runs"
TELEMETRY_PREFIX = "/v1.0/Telemetries"
def __init__(self, client: FlaskClient):
self._client = client
def remote_user_header(self):
return {"X-Remote-User": getpass.getuser()}
def heartbeat(self):
return self._client.get("/heartbeat")
# connection APIs
def connection_operation_with_invalid_user(self, status_code=None):
response = self._client.get(f"{self.CONNECTION_URL_PREFIX}/", headers={"X-Remote-User": "invalid_user"})
if status_code:
assert status_code == response.status_code, response.text
return response
def list_connections(self, status_code=None):
response = self._client.get(f"{self.CONNECTION_URL_PREFIX}/", headers=self.remote_user_header())
if status_code:
assert status_code == response.status_code, response.text
return response
def list_connections_by_provider(self, working_dir, status_code=None):
response = self._client.get(
f"{self.CONNECTION_URL_PREFIX}/",
query_string={"working_directory": working_dir},
headers=self.remote_user_header(),
)
if status_code:
assert status_code == response.status_code, response.text
return response
def get_connection(self, name: str, status_code=None):
response = self._client.get(f"{self.CONNECTION_URL_PREFIX}/{name}", headers=self.remote_user_header())
if status_code:
assert status_code == response.status_code, response.text
return response
def get_connections_by_provider(self, name: str, working_dir, status_code=None):
response = self._client.get(
f"{self.CONNECTION_URL_PREFIX}/{name}",
data={"working_directory": working_dir},
headers=self.remote_user_header(),
)
if status_code:
assert status_code == response.status_code, response.text
return response
def get_connection_with_secret(self, name: str, status_code=None):
response = self._client.get(
f"{self.CONNECTION_URL_PREFIX}/{name}/listsecrets", headers=self.remote_user_header()
)
if status_code:
assert status_code == response.status_code, response.text
return response
def get_connection_specs(self, status_code=None):
response = self._client.get(f"{self.CONNECTION_URL_PREFIX}/specs")
if status_code:
assert status_code == response.status_code, response.text
return response
# run APIs
def list_runs(self, status_code=None):
# TODO: add query parameters
response = self._client.get(f"{self.RUN_URL_PREFIX}/", headers=self.remote_user_header())
if status_code:
assert status_code == response.status_code, response.text
return response
def submit_run(self, request_body, status_code=None):
response = self._client.post(f"{self.RUN_URL_PREFIX}/submit", json=request_body)
if status_code:
assert status_code == response.status_code, response.text
return response
def update_run(
self, name: str, display_name: str = None, description: str = None, tags: str = None, status_code=None
):
request_body = {
"display_name": display_name,
"description": description,
"tags": tags,
}
response = self._client.put(f"{self.RUN_URL_PREFIX}/{name}", json=request_body)
if status_code:
assert status_code == response.status_code, response.text
return response
def archive_run(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/archive")
if status_code:
assert status_code == response.status_code, response.text
return response
def restore_run(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/restore")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_run_visualize(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/visualize")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_run(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_child_runs(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/childRuns")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_node_runs(self, name: str, node_name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/nodeRuns/{node_name}")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_run_metadata(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/metaData")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_run_log(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/logContent")
if status_code:
assert status_code == response.status_code, response.text
return response
def get_run_metrics(self, name: str, status_code=None):
response = self._client.get(f"{self.RUN_URL_PREFIX}/{name}/metrics")
if status_code:
assert status_code == response.status_code, response.text
return response
# telemetry APIs
def create_telemetry(self, *, body, headers, status_code=None):
response = self._client.post(
f"{self.TELEMETRY_PREFIX}/",
headers={
**self.remote_user_header(),
**headers,
},
json=body,
)
if status_code:
assert status_code == response.status_code, response.text
return response
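# Illustrative usage sketch, wired the same way as the e2e tests that consume
# these helpers via the pfs_op fixture:
#
#     def test_list_connections(pfs_op: PFSOperations):
#         with check_activity_end_telemetry(activity_name="pf.connections.list"):
#             connections = pfs_op.list_connections(status_code=200).json
#         assert isinstance(connections, list)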
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/__init__.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/conftest.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
from flask.app import Flask
from promptflow import PFClient
from .utils import PFSOperations
@pytest.fixture
def app() -> Flask:
from promptflow._sdk._service.app import create_app
app, _ = create_app()
app.config.update({"TESTING": True})
yield app
@pytest.fixture
def pfs_op(app: Flask) -> PFSOperations:
client = app.test_client()
return PFSOperations(client)
@pytest.fixture(scope="session")
def pf_client() -> PFClient:
return PFClient()
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/.coveragerc | [run]
source =
*/promptflow/_sdk/_service/*
omit =
*/promptflow/_cli/*
*/promptflow/azure/*
*/promptflow/entities/*
*/promptflow/operations/*
*__init__.py*
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/e2etests/__init__.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/e2etests/test_connection_apis.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import tempfile
import uuid
from pathlib import Path
import mock
import pytest
from sdk_cli_azure_test.recording_utilities import is_replay
from promptflow import PFClient
from promptflow._sdk.entities import CustomConnection
from ..utils import PFSOperations, check_activity_end_telemetry
def create_custom_connection(client: PFClient) -> str:
name = str(uuid.uuid4())
connection = CustomConnection(name=name, configs={"api_base": "test"}, secrets={"api_key": "test"})
client.connections.create_or_update(connection)
return name
@pytest.mark.e2etest
class TestConnectionAPIs:
def test_list_connections(self, pf_client: PFClient, pfs_op: PFSOperations) -> None:
create_custom_connection(pf_client)
with check_activity_end_telemetry(activity_name="pf.connections.list"):
connections = pfs_op.list_connections().json
assert len(connections) >= 1
def test_get_connection(self, pf_client: PFClient, pfs_op: PFSOperations) -> None:
name = create_custom_connection(pf_client)
with check_activity_end_telemetry(activity_name="pf.connections.get"):
conn_from_pfs = pfs_op.get_connection(name=name, status_code=200).json
assert conn_from_pfs["name"] == name
assert conn_from_pfs["configs"]["api_base"] == "test"
assert "api_key" in conn_from_pfs["secrets"]
# get connection with secret
with check_activity_end_telemetry(activity_name="pf.connections.get"):
conn_from_pfs = pfs_op.get_connection_with_secret(name=name, status_code=200).json
assert not conn_from_pfs["secrets"]["api_key"].startswith("*")
def test_list_connection_with_invalid_user(self, pfs_op: PFSOperations) -> None:
# TODO: should we record telemetry for this case?
with check_activity_end_telemetry(expected_activities=[]):
conn_from_pfs = pfs_op.connection_operation_with_invalid_user()
assert conn_from_pfs.status_code == 403
def test_get_connection_specs(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(expected_activities=[]):
specs = pfs_op.get_connection_specs(status_code=200).json
assert len(specs) > 1
@pytest.mark.skipif(is_replay(), reason="connection provider test, skip in non-live mode.")
    def test_get_connection_by_provider(self, pfs_op, subscription_id, resource_group_name, workspace_name):
target = "promptflow._sdk._pf_client.Configuration.get_connection_provider"
provider_url_target = (
"promptflow._sdk.operations._local_azure_connection_operations."
"LocalAzureConnectionOperations._extract_workspace"
)
mock_provider_url = (subscription_id, resource_group_name, workspace_name)
with mock.patch(target) as mocked_config, mock.patch(provider_url_target) as mocked_provider_url:
mocked_config.return_value = "azureml"
mocked_provider_url.return_value = mock_provider_url
connections = pfs_op.list_connections(status_code=200).json
assert len(connections) > 0
connection = pfs_op.get_connection(name=connections[0]["name"], status_code=200).json
assert connection["name"] == connections[0]["name"]
target = "promptflow._sdk._pf_client.Configuration.get_config"
with tempfile.TemporaryDirectory() as temp:
config_file = Path(temp) / ".azureml" / "config.json"
config_file.parent.mkdir(parents=True, exist_ok=True)
with open(config_file, "w") as f:
config = {
"subscription_id": subscription_id,
"resource_group": resource_group_name,
"workspace_name": workspace_name,
}
json.dump(config, f)
with mock.patch(target) as mocked_config:
mocked_config.return_value = "azureml"
connections = pfs_op.list_connections_by_provider(working_dir=temp, status_code=200).json
assert len(connections) > 0
connection = pfs_op.get_connections_by_provider(
name=connections[0]["name"], working_dir=temp, status_code=200
).json
assert connection["name"] == connections[0]["name"]
                # this test checks 2 cases:
                # 1. if the working directory does not exist, it should return 400
                # 2. the working directory has been encoded and decoded correctly, so the previous call can pass validation
error_message = pfs_op.list_connections_by_provider(
working_dir=temp + "not exist", status_code=400
).json
assert error_message == {
"errors": {"working_directory": "Invalid working directory."},
"message": "Input payload validation failed",
}
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/e2etests/test_run_apis.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import uuid
from dataclasses import fields
from pathlib import Path
import pytest
from promptflow import PFClient
from promptflow._sdk.entities import Run
from promptflow.contracts._run_management import RunMetadata
from ..utils import PFSOperations, check_activity_end_telemetry
FLOW_PATH = "./tests/test_configs/flows/print_env_var"
DATA_PATH = "./tests/test_configs/datas/env_var_names.jsonl"
def create_run_against_multi_line_data(client: PFClient) -> Run:
return client.run(flow=FLOW_PATH, data=DATA_PATH)
@pytest.mark.usefixtures("use_secrets_config_file")
@pytest.mark.e2etest
class TestRunAPIs:
@pytest.fixture(autouse=True)
def _submit_run(self, pf_client):
self.run = create_run_against_multi_line_data(pf_client)
def test_list_runs(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.list"):
response = pfs_op.list_runs(status_code=200).json
assert len(response) >= 1
@pytest.mark.skip(reason="Task 2917711: cli command will give strange stdout in ci; re-enable after switch to sdk")
def test_submit_run(self, pfs_op: PFSOperations) -> None:
# run submit is done via cli, so no telemetry will be detected here
with check_activity_end_telemetry(expected_activities=[]):
response = pfs_op.submit_run(
{
"flow": Path(FLOW_PATH).absolute().as_posix(),
"data": Path(DATA_PATH).absolute().as_posix(),
},
status_code=200,
)
with check_activity_end_telemetry(activity_name="pf.runs.get"):
run_from_pfs = pfs_op.get_run(name=response.json["name"]).json
assert run_from_pfs
    def test_update_run(
display_name = "new_display_name"
tags = {"key": "value"}
with check_activity_end_telemetry(activity_name="pf.runs.update"):
run_from_pfs = pfs_op.update_run(
name=self.run.name, display_name=display_name, tags=json.dumps(tags), status_code=200
).json
assert run_from_pfs["display_name"] == display_name
assert run_from_pfs["tags"] == tags
def test_archive_restore_run(self, pf_client: PFClient, pfs_op: PFSOperations) -> None:
run = create_run_against_multi_line_data(pf_client)
with check_activity_end_telemetry(
expected_activities=[
{"activity_name": "pf.runs.get", "first_call": False},
{"activity_name": "pf.runs.archive"},
]
):
pfs_op.archive_run(name=run.name, status_code=200)
runs = pfs_op.list_runs().json
assert not any([item["name"] == run.name for item in runs])
with check_activity_end_telemetry(
expected_activities=[
{"activity_name": "pf.runs.get", "first_call": False},
{"activity_name": "pf.runs.restore"},
]
):
pfs_op.restore_run(name=run.name, status_code=200)
runs = pfs_op.list_runs().json
assert any([item["name"] == run.name for item in runs])
def test_visualize_run(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(
expected_activities=[
{"activity_name": "pf.runs.get", "first_call": False},
{"activity_name": "pf.runs.get", "first_call": False},
{"activity_name": "pf.runs.get_metrics", "first_call": False},
{"activity_name": "pf.runs.visualize"},
]
):
response = pfs_op.get_run_visualize(name=self.run.name, status_code=200)
assert response.data
def test_get_not_exist_run(self, pfs_op: PFSOperations) -> None:
random_name = str(uuid.uuid4())
with check_activity_end_telemetry(activity_name="pf.runs.get", completion_status="Failure"):
response = pfs_op.get_run(name=random_name)
assert response.status_code == 404
def test_get_run(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.get"):
run_from_pfs = pfs_op.get_run(name=self.run.name, status_code=200).json
assert run_from_pfs["name"] == self.run.name
def test_get_child_runs(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.get"):
run_from_pfs = pfs_op.get_child_runs(name=self.run.name, status_code=200).json
assert len(run_from_pfs) == 1
assert run_from_pfs[0]["parent_run_id"] == self.run.name
def test_get_node_runs(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.get"):
run_from_pfs = pfs_op.get_node_runs(name=self.run.name, node_name="print_env", status_code=200).json
assert len(run_from_pfs) == 1
assert run_from_pfs[0]["node"] == "print_env"
def test_get_run_log(self, pfs_op: PFSOperations, pf_client: PFClient) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.get"):
log = pfs_op.get_run_log(name=self.run.name, status_code=200)
assert not log.data.decode("utf-8").startswith('"')
def test_get_run_metrics(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.get"):
metrics = pfs_op.get_run_metrics(name=self.run.name, status_code=200).json
assert metrics is not None
def test_get_run_metadata(self, pfs_op: PFSOperations) -> None:
with check_activity_end_telemetry(activity_name="pf.runs.get"):
metadata = pfs_op.get_run_metadata(name=self.run.name, status_code=200).json
for field in fields(RunMetadata):
assert field.name in metadata
assert metadata["name"] == self.run.name
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/e2etests/test_general_apis.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
from promptflow._sdk._utils import get_promptflow_sdk_version
from ..utils import PFSOperations
@pytest.mark.e2etest
class TestGeneralAPIs:
def test_heartbeat(self, pfs_op: PFSOperations) -> None:
response = pfs_op.heartbeat()
assert response.status_code == 200
response_json = response.json
assert isinstance(response_json, dict)
assert "promptflow" in response_json
assert response_json["promptflow"] == get_promptflow_sdk_version()
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/e2etests/test_cli.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import subprocess
import sys
from time import sleep
import pytest
import requests
from promptflow._sdk._service.entry import main
from promptflow._sdk._service.utils.utils import get_port_from_config, get_random_port, kill_exist_service
@pytest.mark.e2etest
class TestPromptflowServiceCLI:
def _run_pfs_command(self, *args):
"""Run a pfs command with the given arguments."""
origin_argv = sys.argv
try:
sys.argv = ["pfs"] + list(args)
main()
finally:
sys.argv = origin_argv
def _test_start_service(self, port=None, force=False):
command = f"pfs start --port {port}" if port else "pfs start"
if force:
command = f"{command} --force"
start_pfs = subprocess.Popen(command, shell=True)
# Wait for service to be started
sleep(5)
assert self._is_service_healthy()
start_pfs.terminate()
start_pfs.wait(10)
def _is_service_healthy(self, port=None):
port = port or get_port_from_config()
response = requests.get(f"http://localhost:{port}/heartbeat")
return response.status_code == 200
def test_start_service(self):
try:
# start pfs by pf.yaml
self._test_start_service()
# Start pfs by specified port
random_port = get_random_port()
self._test_start_service(port=random_port, force=True)
# Force start pfs
start_pfs = subprocess.Popen("pfs start", shell=True)
# Wait for service to be started
sleep(5)
self._test_start_service(force=True)
# previous pfs is killed
assert start_pfs.poll() is not None
finally:
port = get_port_from_config()
kill_exist_service(port=port)
def test_show_service_status(self, capsys):
with pytest.raises(SystemExit):
self._run_pfs_command("show-status")
start_pfs = subprocess.Popen("pfs start", shell=True)
# Wait for service to be started
sleep(5)
self._run_pfs_command("show-status")
output, _ = capsys.readouterr()
assert str(get_port_from_config()) in output
start_pfs.terminate()
start_pfs.wait(10)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_pfs_test/e2etests/test_telemetry_apis.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
from ..utils import PFSOperations, check_activity_end_telemetry
@pytest.mark.usefixtures("use_secrets_config_file")
@pytest.mark.e2etest
class TestTelemetryAPIs:
def test_post_telemetry(self, pfs_op: PFSOperations) -> None:
from promptflow._sdk._telemetry.activity import generate_request_id
request_id = generate_request_id()
user_agent = "prompt-flow-extension/1.8.0 (win32; x64) VS/0.0.1"
_ = pfs_op.create_telemetry(
body={
"eventType": "Start",
"timestamp": "2021-01-01T00:00:00Z",
"metadata": {
"activityName": "pf.flow.test",
"activityType": "InternalCall",
},
},
status_code=200,
headers={
"x-ms-promptflow-request-id": request_id,
"User-Agent": user_agent,
},
).json
with check_activity_end_telemetry(
activity_name="pf.flow.test",
activity_type="InternalCall",
user_agent=f"{user_agent} local_pfs/0.0.1",
request_id=request_id,
):
response = pfs_op.create_telemetry(
body={
"eventType": "End",
"timestamp": "2021-01-01T00:00:00Z",
"metadata": {
"activityName": "pf.flow.test",
"activityType": "InternalCall",
"completionStatus": "Success",
"durationMs": 1000,
},
},
headers={
"x-ms-promptflow-request-id": request_id,
"User-Agent": user_agent,
},
status_code=200,
).json
assert len(response) >= 1
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_global_config_test/conftest.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
from promptflow import PFClient
from promptflow._sdk._configuration import Configuration
AZUREML_RESOURCE_PROVIDER = "Microsoft.MachineLearningServices"
RESOURCE_ID_FORMAT = "/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}"
@pytest.fixture
def pf() -> PFClient:
return PFClient()
@pytest.fixture
def global_config(subscription_id: str, resource_group_name: str, workspace_name: str) -> None:
config = Configuration.get_instance()
if Configuration.CONNECTION_PROVIDER in config._config:
return
config.set_config(
Configuration.CONNECTION_PROVIDER,
"azureml:"
+ RESOURCE_ID_FORMAT.format(subscription_id, resource_group_name, AZUREML_RESOURCE_PROVIDER, workspace_name),
)
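# Example of the provider string assembled above (identifiers are placeholders):
#
#     azureml:/subscriptions/<sub-id>/resourceGroups/<rg>/providers/
#         Microsoft.MachineLearningServices/workspaces/<workspace-name>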
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_global_config_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_global_config_test/e2etests/test_global_config.py | from pathlib import Path
import pytest
FLOWS_DIR = Path(__file__).parent.parent.parent / "test_configs" / "flows"
DATAS_DIR = Path(__file__).parent.parent.parent / "test_configs" / "datas"
@pytest.mark.usefixtures("global_config")
@pytest.mark.e2etest
class TestGlobalConfig:
def test_basic_flow_bulk_run(self, pf) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
assert run.status == "Completed"
# Test repeated execute flow run
run = pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
assert run.status == "Completed"
def test_connection_operations(self, pf) -> None:
connections = pf.connections.list()
assert len(connections) > 0, f"No connection found. Provider: {pf._connection_provider}"
# Assert create/update/delete not supported.
with pytest.raises(NotImplementedError):
pf.connections.create_or_update(connection=connections[0])
with pytest.raises(NotImplementedError):
pf.connections.delete(name="test_connection")
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/conftest.py | import base64
import json
import multiprocessing
import os
from pathlib import Path
from unittest.mock import patch
import pytest
from mock import mock
from pytest_mock import MockerFixture
from sqlalchemy import create_engine
from promptflow import PFClient
from promptflow._sdk._configuration import Configuration
from promptflow._sdk._constants import EXPERIMENT_CREATED_ON_INDEX_NAME, EXPERIMENT_TABLE_NAME, LOCAL_MGMT_DB_PATH
from promptflow._sdk._serving.app import create_app as create_serving_app
from promptflow._sdk.entities import AzureOpenAIConnection as AzureOpenAIConnectionEntity
from promptflow._sdk.entities._connection import CustomConnection, _Connection
from promptflow.executor._line_execution_process_pool import _process_wrapper
from promptflow.executor._process_manager import create_spawned_fork_process_manager
from .recording_utilities import RecordStorage, mock_tool, recording_array_extend, recording_array_reset
PROMOTFLOW_ROOT = Path(__file__) / "../../.."
RUNTIME_TEST_CONFIGS_ROOT = Path(PROMOTFLOW_ROOT / "tests/test_configs/runtime")
RECORDINGS_TEST_CONFIGS_ROOT = Path(PROMOTFLOW_ROOT / "tests/test_configs/node_recordings").resolve()
CONNECTION_FILE = (PROMOTFLOW_ROOT / "connections.json").resolve().absolute().as_posix()
MODEL_ROOT = Path(PROMOTFLOW_ROOT / "tests/test_configs/flows")
@pytest.fixture(scope="session")
def local_client() -> PFClient:
yield PFClient()
@pytest.fixture(scope="session")
def pf() -> PFClient:
yield PFClient()
@pytest.fixture()
def local_aoai_connection(local_client, azure_open_ai_connection):
conn = AzureOpenAIConnectionEntity(
name="azure_open_ai_connection",
api_key=azure_open_ai_connection.api_key,
api_base=azure_open_ai_connection.api_base,
)
local_client.connections.create_or_update(conn)
return conn
@pytest.fixture()
def local_alt_aoai_connection(local_client, azure_open_ai_connection):
conn = AzureOpenAIConnectionEntity(
name="new_ai_connection",
api_key=azure_open_ai_connection.api_key,
api_base=azure_open_ai_connection.api_base,
)
local_client.connections.create_or_update(conn)
return conn
@pytest.fixture()
def local_custom_connection(local_client, azure_open_ai_connection):
conn = CustomConnection(
name="test_custom_connection",
secrets={"test_secret": "test_value"},
)
local_client.connections.create_or_update(conn)
return conn
_connection_setup = False
@pytest.fixture
def setup_local_connection(local_client, azure_open_ai_connection):
global _connection_setup
if _connection_setup:
return
connection_dict = json.loads(open(CONNECTION_FILE, "r").read())
for name, _dct in connection_dict.items():
if _dct["type"] == "BingConnection":
continue
local_client.connections.create_or_update(_Connection._from_execution_connection_dict(name=name, data=_dct))
_connection_setup = True
@pytest.fixture
def setup_experiment_table():
with mock.patch("promptflow._sdk._configuration.Configuration.is_internal_features_enabled") as mock_func:
mock_func.return_value = True
# Call this session to initialize session maker, then add experiment table
from promptflow._sdk._orm import Experiment, mgmt_db_session
from promptflow._sdk._orm.session import create_index_if_not_exists, create_or_update_table
mgmt_db_session()
engine = create_engine(f"sqlite:///{str(LOCAL_MGMT_DB_PATH)}", future=True)
if Configuration.get_instance().is_internal_features_enabled():
create_or_update_table(engine, orm_class=Experiment, tablename=EXPERIMENT_TABLE_NAME)
create_index_if_not_exists(engine, EXPERIMENT_CREATED_ON_INDEX_NAME, EXPERIMENT_TABLE_NAME, "created_on")
@pytest.fixture
def flow_serving_client(mocker: MockerFixture):
model_path = (Path(MODEL_ROOT) / "basic-with-connection").resolve().absolute().as_posix()
mocker.patch.dict(os.environ, {"PROMPTFLOW_PROJECT_PATH": model_path})
mocker.patch.dict(os.environ, {"USER_AGENT": "test-user-agent"})
app = create_serving_app(environment_variables={"API_TYPE": "${azure_open_ai_connection.api_type}"})
app.config.update(
{
"TESTING": True,
}
)
return app.test_client()
@pytest.fixture
def flow_serving_client_with_encoded_connection(mocker: MockerFixture):
from promptflow._core.connection_manager import ConnectionManager
from promptflow._sdk._serving.utils import encode_dict
    with open(CONNECTION_FILE, "r") as f:
        connection_dict = json.load(f)
connection_manager = ConnectionManager(connection_dict)
connections = {"PROMPTFLOW_ENCODED_CONNECTIONS": encode_dict(connection_manager.to_connections_dict())}
return create_client_by_model("basic-with-connection", mocker, connections, extension_type="azureml")
@pytest.fixture
def evaluation_flow_serving_client(mocker: MockerFixture):
model_path = (Path(MODEL_ROOT) / "web_classification").resolve().absolute().as_posix()
mocker.patch.dict(os.environ, {"PROMPTFLOW_PROJECT_PATH": model_path})
app = create_serving_app()
app.config.update(
{
"TESTING": True,
}
)
return app.test_client()
def create_client_by_model(
    model_name: str, mocker: MockerFixture, connections: dict = None, extension_type=None, environment_variables=None
):
    # Use None defaults instead of mutable ones: a shared default dict would leak state across calls.
    connections = connections or {}
    environment_variables = environment_variables or {}
    model_path = (Path(MODEL_ROOT) / model_name).resolve().absolute().as_posix()
    mocker.patch.dict(os.environ, {"PROMPTFLOW_PROJECT_PATH": model_path})
    if connections:
        mocker.patch.dict(os.environ, connections)
    if extension_type == "azureml":
        environment_variables["API_TYPE"] = "${azure_open_ai_connection.api_type}"
    app = create_serving_app(environment_variables=environment_variables, extension_type=extension_type)
app.config.update(
{
"TESTING": True,
}
)
return app.test_client()
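# Illustrative usage of the helper above (hedged sketch; the route and payload
# are assumptions based on the serving tests in this repo):
#
#   client = create_client_by_model("basic-with-connection", mocker)
#   response = client.post("/score", data=json.dumps({"text": "hi"}))
#   assert response.status_code == 200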
@pytest.fixture
def serving_client_llm_chat(mocker: MockerFixture):
return create_client_by_model("chat_flow_with_stream_output", mocker)
@pytest.fixture
def serving_client_python_stream_tools(mocker: MockerFixture):
return create_client_by_model("python_stream_tools", mocker)
@pytest.fixture
def sample_image():
image_path = (Path(MODEL_ROOT) / "python_tool_with_simple_image" / "logo.jpg").resolve()
    return base64.b64encode(image_path.read_bytes()).decode("utf-8")
@pytest.fixture
def serving_client_image_python_flow(mocker: MockerFixture):
return create_client_by_model("python_tool_with_simple_image", mocker)
@pytest.fixture
def serving_client_composite_image_flow(mocker: MockerFixture):
return create_client_by_model("python_tool_with_composite_image", mocker)
@pytest.fixture
def serving_client_with_environment_variables(mocker: MockerFixture):
return create_client_by_model(
"flow_with_environment_variables",
mocker,
environment_variables={"env2": "runtime_env2", "env10": "aaaaa"},
)
@pytest.fixture
def recording_file_override(request: pytest.FixtureRequest, mocker: MockerFixture):
if RecordStorage.is_replaying_mode() or RecordStorage.is_recording_mode():
file_path = RECORDINGS_TEST_CONFIGS_ROOT / "node_cache.shelve"
RecordStorage.get_instance(file_path)
yield
SpawnProcess = multiprocessing.get_context("spawn").Process
class MockSpawnProcess(SpawnProcess):
def __init__(self, group=None, target=None, *args, **kwargs):
if target == _process_wrapper:
target = _mock_process_wrapper
if target == create_spawned_fork_process_manager:
target = _mock_create_spawned_fork_process_manager
super().__init__(group, target, *args, **kwargs)
@pytest.fixture
def recording_injection(mocker: MockerFixture, recording_file_override):
original_process_class = multiprocessing.get_context("spawn").Process
multiprocessing.get_context("spawn").Process = MockSpawnProcess
if "spawn" == multiprocessing.get_start_method():
multiprocessing.Process = MockSpawnProcess
patches = setup_recording_injection_if_enabled()
try:
yield (RecordStorage.is_replaying_mode() or RecordStorage.is_recording_mode(), recording_array_extend)
finally:
if RecordStorage.is_replaying_mode() or RecordStorage.is_recording_mode():
RecordStorage.get_instance().delete_lock_file()
recording_array_reset()
multiprocessing.get_context("spawn").Process = original_process_class
if "spawn" == multiprocessing.get_start_method():
multiprocessing.Process = original_process_class
for patcher in patches:
patcher.stop()
def setup_recording_injection_if_enabled():
patches = []
if RecordStorage.is_replaying_mode() or RecordStorage.is_recording_mode():
file_path = RECORDINGS_TEST_CONFIGS_ROOT / "node_cache.shelve"
RecordStorage.get_instance(file_path)
from promptflow._core.tool import tool as original_tool
mocked_tool = mock_tool(original_tool)
patch_targets = ["promptflow._core.tool.tool", "promptflow._internal.tool", "promptflow.tool"]
for target in patch_targets:
patcher = patch(target, mocked_tool)
patches.append(patcher)
patcher.start()
return patches
def _mock_process_wrapper(*args, **kwargs):
setup_recording_injection_if_enabled()
return _process_wrapper(*args, **kwargs)
def _mock_create_spawned_fork_process_manager(*args, **kwargs):
setup_recording_injection_if_enabled()
return create_spawned_fork_process_manager(*args, **kwargs)
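# Hedged sketch (standalone, not used by the fixtures above) of how replacing the
# spawn Process class reroutes a known target, mirroring MockSpawnProcess. The
# demo targets are hypothetical stand-ins for _process_wrapper and
# _mock_create_spawned_fork_process_manager:
#
#   import multiprocessing
#
#   def real_target():
#       print("real")
#
#   def mocked_target():
#       print("mocked, with recording injection applied first")
#
#   class DemoSpawnProcess(multiprocessing.get_context("spawn").Process):
#       def __init__(self, group=None, target=None, *args, **kwargs):
#           if target == real_target:
#               target = mocked_target  # swap before the base class stores it
#           super().__init__(group, target, *args, **kwargs)
#
#   DemoSpawnProcess(target=real_target).start()  # child runs mocked_target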
| 0 |
promptflow_repo/promptflow/src/promptflow/tests | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/.coveragerc | [run]
source =
*/promptflow/_cli/*
*/promptflow/_sdk/*
*/promptflow/azure/*
omit =
*/promptflow/azure/_restclient/*
*__init__.py*
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/recording_utilities/__init__.py | from .constants import ENVIRON_TEST_MODE, RecordMode
from .mock_tool import mock_tool, recording_array_extend, recording_array_reset
from .record_storage import RecordFileMissingException, RecordItemMissingException, RecordStorage
__all__ = [
"RecordStorage",
"RecordMode",
"ENVIRON_TEST_MODE",
"RecordFileMissingException",
"RecordItemMissingException",
"mock_tool",
"recording_array_extend",
"recording_array_reset",
]
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/recording_utilities/record_storage.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import hashlib
import json
import os
import shelve
from pathlib import Path
from typing import Dict
from filelock import FileLock
from promptflow.exceptions import PromptflowException
from .constants import ENVIRON_TEST_MODE, RecordMode
class RecordItemMissingException(PromptflowException):
"""Exception raised when record item missing."""
pass
class RecordFileMissingException(PromptflowException):
"""Exception raised when record file missing or invalid."""
pass
class RecordStorage(object):
"""
    RecordStorage is used to store the records of node runs.
    The record file is usually stored in .promptflow/node_cache.shelve.
    Currently only text input/output can be recorded.
Example of cached items:
{
"/record/file/resolved": {
"hash_value": { # hash_value is sha1 of dict, accelerate the search
"input": {
"key1": "value1", # Converted to string, type info dropped
},
"output": "output_convert_to_string",
"output_type": "output_type" # Currently support only simple strings.
}
}
}
"""
_standard_record_folder = ".promptflow"
_standard_record_name = "node_cache.shelve"
_instance = None
def __init__(self, record_file: str = None):
"""
RecordStorage is used to store the record of node run.
"""
self._record_file: Path = None
self.cached_items: Dict[str, Dict[str, Dict[str, object]]] = {}
self.record_file = record_file
@property
def record_file(self) -> Path:
return self._record_file
@record_file.setter
def record_file(self, record_file_input) -> None:
"""
        Load the record file if it exists.
"""
if record_file_input == self._record_file:
return
if isinstance(record_file_input, str):
self._record_file = Path(record_file_input).resolve()
elif isinstance(record_file_input, Path):
self._record_file = record_file_input.resolve()
else:
return
if not self._record_file.parts[-1].endswith(RecordStorage._standard_record_name):
record_folder = self._record_file / RecordStorage._standard_record_folder
self._record_file = record_folder / RecordStorage._standard_record_name
else:
record_folder = self._record_file.parent
self._record_file_str = str(self._record_file.resolve())
        # Create the cache folder if it does not exist.
        if not record_folder.exists():
            record_folder.mkdir(parents=True, exist_ok=True)
        # If the record file already exists, load it.
if self.exists_record_file(record_folder, self._record_file.parts[-1]):
self._load_file()
else:
self.cached_items = {
self._record_file_str: {},
}
def exists_record_file(self, record_folder, file_name) -> bool:
files = os.listdir(record_folder)
for file in files:
if file.startswith(file_name):
return True
return False
def _write_file(self, hashkey) -> None:
file_content = self.cached_items.get(self._record_file_str, None)
if file_content is not None:
file_content_line = file_content.get(hashkey, None)
if file_content_line is not None:
lock = FileLock(self.record_file.parent / "record_file.lock")
with lock:
saved_dict = shelve.open(self._record_file_str, "c", writeback=False)
saved_dict[hashkey] = file_content_line
saved_dict.close()
else:
raise RecordItemMissingException(f"Record item not found in cache with hashkey {hashkey}.")
else:
raise RecordFileMissingException(
f"This exception should not happen here, but record file is not found {self._record_file_str}."
)
    def _load_file(self) -> None:
        local_content = self.cached_items.get(self._record_file_str, None)
        if local_content:
            return
        if RecordStorage.is_recording_mode():
            # In recording mode another process may be writing the shelve file,
            # so guard the read with the shared file lock.
            lock = FileLock(self.record_file.parent / "record_file.lock")
            with lock:
                self._load_file_content()
        else:
            self._load_file_content()
    def _load_file_content(self) -> None:
        if not self.exists_record_file(self.record_file.parent, self.record_file.parts[-1]):
            return
        self.cached_items[self._record_file_str] = {}
        saved_dict = shelve.open(self._record_file_str, "r", writeback=False)
        for key, value in saved_dict.items():
            self.cached_items[self._record_file_str][key] = value
        saved_dict.close()
def delete_lock_file(self):
lock_file = self.record_file.parent / "record_file.lock"
if lock_file.exists():
os.remove(lock_file)
def get_record(self, input_dict: Dict) -> object:
"""
Get record from local storage.
:param input_dict: input dict of critical AOAI inputs
:type input_dict: Dict
        :raises RecordFileMissingException: Record file does not exist.
        :raises RecordItemMissingException: Record item does not exist in the record file.
:return: original output of node run
:rtype: object
"""
input_dict = self._recursive_create_hashable_args(input_dict)
hash_value: str = hashlib.sha1(str(sorted(input_dict.items())).encode("utf-8")).hexdigest()
current_saved_records: Dict[str, str] = self.cached_items.get(self._record_file_str, None)
if current_saved_records is None:
raise RecordFileMissingException(f"Record file not found {self.record_file}.")
saved_output = current_saved_records.get(hash_value, None)
if saved_output is None:
raise RecordItemMissingException(
f"Record item not found in file {self.record_file}.\n" f"values: {json.dumps(input_dict)}\n"
)
# not all items are reserved in the output dict.
output = saved_output["output"]
output_type = saved_output["output_type"]
if "generator" in output_type:
return self._create_output_generator(output, output_type)
else:
return output
def _recursive_create_hashable_args(self, item):
if isinstance(item, tuple):
return [self._recursive_create_hashable_args(i) for i in item]
if isinstance(item, list):
return [self._recursive_create_hashable_args(i) for i in item]
if isinstance(item, dict):
return {k: self._recursive_create_hashable_args(v) for k, v in item.items()}
elif "module: promptflow.connections" in str(item) or "object at" in str(item):
return []
else:
return item
def _parse_output_generator(self, output):
"""
        Special handling for generators, since pickle does not work on them.
        Returns the real list for recording, and creates a fresh generator to stand in for the original output.
        Parsing the output relies on a simplifying assumption: the output is a simple dict, list, or generator,
        because handling a full output schema would be too heavy.
Example: {"answer": <generator>, "a": "b"}, <generator>
"""
output_type = ""
output_value = None
output_generator = None
if isinstance(output, dict):
output_value = {}
output_generator = {}
for item in output.items():
k, v = item
if type(v).__name__ == "generator":
vlist = list(v)
def vgenerator():
for vitem in vlist:
yield vitem
output_value[k] = vlist
output_generator[k] = vgenerator()
output_type = "dict[generator]"
else:
output_value[k] = v
elif type(output).__name__ == "generator":
output_value = list(output)
def generator():
for item in output_value:
yield item
output_generator = generator()
output_type = "generator"
else:
output_value = output
output_generator = None
output_type = type(output).__name__
return output_value, output_generator, output_type
def _create_output_generator(self, output, output_type):
"""
        Special handling for generators.
        Returns a generator reconstructed from the recorded output.
        Creating the output relies on a simplifying assumption:
        every list recorded with a generator output type is treated as a generator.
"""
output_generator = None
if output_type == "dict[generator]":
output_generator = {}
for k, v in output.items():
if type(v).__name__ == "list":
def vgenerator():
for item in v:
yield item
output_generator[k] = vgenerator()
else:
output_generator[k] = v
elif output_type == "generator":
def generator():
for item in output:
yield item
output_generator = generator()
return output_generator
def set_record(self, input_dict: Dict, output):
"""
        Set a record in local storage, always overriding the old record.
:param input_dict: input dict of critical AOAI inputs
:type input_dict: OrderedDict
:param output: original output of node run
:type output: object
"""
        # Filter the args; live objects (repr contains "object at") cannot be hashed stably.
input_dict = self._recursive_create_hashable_args(input_dict)
hash_value: str = hashlib.sha1(str(sorted(input_dict.items())).encode("utf-8")).hexdigest()
current_saved_records: Dict[str, str] = self.cached_items.get(self._record_file_str, None)
output_value, output_generator, output_type = self._parse_output_generator(output)
if current_saved_records is None:
current_saved_records = {}
current_saved_records[hash_value] = {
"input": input_dict,
"output": output_value,
"output_type": output_type,
}
else:
saved_output = current_saved_records.get(hash_value, None)
if saved_output is not None:
if saved_output["output"] == output_value and saved_output["output_type"] == output_type:
if "generator" in output_type:
return output_generator
else:
return output_value
else:
current_saved_records[hash_value] = {
"input": input_dict,
"output": output_value,
"output_type": output_type,
}
else:
current_saved_records[hash_value] = {
"input": input_dict,
"output": output_value,
"output_type": output_type,
}
self.cached_items[self._record_file_str] = current_saved_records
self._write_file(hash_value)
if "generator" in output_type:
return output_generator
else:
return output_value
@classmethod
def get_test_mode_from_environ(cls) -> str:
return os.getenv(ENVIRON_TEST_MODE, RecordMode.LIVE)
@classmethod
def is_recording_mode(cls) -> bool:
return RecordStorage.get_test_mode_from_environ() == RecordMode.RECORD
@classmethod
def is_replaying_mode(cls) -> bool:
return RecordStorage.get_test_mode_from_environ() == RecordMode.REPLAY
@classmethod
def is_live_mode(cls) -> bool:
return RecordStorage.get_test_mode_from_environ() == RecordMode.LIVE
@classmethod
def get_instance(cls, record_file=None) -> "RecordStorage":
"""
Use this to get instance to avoid multiple copies of same record storage.
        :param record_file: initialized on first use; defaults to None. Passing None in the first call will raise an exception.
:type record_file: str or Path, optional
:return: instance of RecordStorage
:rtype: RecordStorage
"""
        # If not in recording or replaying mode, return None.
if not (RecordStorage.is_recording_mode() or RecordStorage.is_replaying_mode()):
return None
# Create instance if not exist
if cls._instance is None:
if record_file is None:
raise RecordFileMissingException("record_file is value None")
cls._instance = RecordStorage(record_file)
if record_file is not None:
cls._instance.record_file = record_file
return cls._instance
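# Hedged sketch of the cache-key scheme used by get_record/set_record above: the
# key is the sha1 of the sorted, stringified input dict, so keyword order never
# affects the lookup. "prompt"/"temperature" below are hypothetical inputs.
def _demo_hash_key(input_dict: Dict) -> str:
    return hashlib.sha1(str(sorted(input_dict.items())).encode("utf-8")).hexdigest()
# e.g. _demo_hash_key({"prompt": "hi", "temperature": 0})
#   == _demo_hash_key({"temperature": 0, "prompt": "hi"})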
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/recording_utilities/mock_tool.py | import functools
import inspect
from promptflow._core.tool import STREAMING_OPTION_PARAMETER_ATTR, ToolType
from promptflow._core.tracer import TraceType, _create_trace_from_function_call
from .record_storage import RecordFileMissingException, RecordItemMissingException, RecordStorage
# recording array is a global variable to store the function names that need to be recorded
recording_array = ["fetch_text_content_from_url", "my_python_tool"]
def recording_array_extend(items):
global recording_array
recording_array.extend(items)
def recording_array_reset():
global recording_array
recording_array = ["fetch_text_content_from_url", "my_python_tool"]
def _prepare_input_dict(func, args, kwargs):
"""Prepare input dict for record storage"""
if func.__name__ == "partial":
func_wo_partial = func.func
else:
func_wo_partial = func
input_dict = {}
for key in kwargs:
input_dict[key] = kwargs[key]
if type(func).__name__ == "partial":
input_dict["_args"] = func.args
for key in func.keywords:
input_dict[key] = func.keywords[key]
else:
input_dict["_args"] = []
input_dict["_func"] = func_wo_partial.__qualname__
return input_dict
def _replace_tool_rule(func):
"""Replace tool with the following rules."""
global recording_array
if func.__name__ == "partial":
func_wo_partial = func.func
else:
func_wo_partial = func
if func_wo_partial.__qualname__.startswith("AzureOpenAI"):
return True
elif func_wo_partial.__qualname__.startswith("OpenAI"):
return True
elif func_wo_partial.__module__ == "promptflow.tools.aoai":
return True
elif func_wo_partial.__module__ == "promptflow.tools.openai_gpt4v":
return True
elif func_wo_partial.__module__ == "promptflow.tools.openai":
return True
elif func_wo_partial.__qualname__ in recording_array:
return True
else:
return False
def call_func(func, args, kwargs):
input_dict = _prepare_input_dict(func, args, kwargs)
if RecordStorage.is_replaying_mode():
return RecordStorage.get_instance().get_record(input_dict)
# Record mode will record item to record file
elif RecordStorage.is_recording_mode():
try:
# prevent recording the same item twice
obj = RecordStorage.get_instance().get_record(input_dict)
except (RecordItemMissingException, RecordFileMissingException):
# recording the item
obj = RecordStorage.get_instance().set_record(input_dict, func(*args, **kwargs))
return obj
async def call_func_async(func, args, kwargs):
input_dict = _prepare_input_dict(func, args, kwargs)
if RecordStorage.is_replaying_mode():
return RecordStorage.get_instance().get_record(input_dict)
# Record mode will record item to record file
elif RecordStorage.is_recording_mode():
try:
# prevent recording the same item twice
obj = RecordStorage.get_instance().get_record(input_dict)
except (RecordItemMissingException, RecordFileMissingException):
# recording the item
obj = RecordStorage.get_instance().set_record(input_dict, await func(*args, **kwargs))
return obj
def mock_tool(original_tool):
"""
    Basically this is the original tool decorator.
    The key modification is that every func(*args, **kwargs) call is wrapped in record/replay logic:
    if replaying:
        return the replayed result
    elif recording:
        if already recorded:
            return the recorded result
        call func(*args, **kwargs) and record the result
    It need not be such a long function, but the tool decorator should not trigger a long stack trace.
"""
def tool(
func=None,
*args_mock,
name: str = None,
description: str = None,
type: str = None,
input_settings=None,
streaming_option_parameter=None,
**kwargs_mock,
):
def tool_decorator(func):
from promptflow.exceptions import UserErrorException
def create_trace(func, args, kwargs):
return _create_trace_from_function_call(func, args=args, kwargs=kwargs, trace_type=TraceType.TOOL)
if inspect.iscoroutinefunction(func):
@functools.wraps(func)
async def decorated_tool(*args, **kwargs):
from promptflow._core.tracer import Tracer
if Tracer.active_instance() is None:
return await call_func_async(func, args, kwargs)
try:
Tracer.push(create_trace(func, args, kwargs))
output = await call_func_async(func, args, kwargs)
return Tracer.pop(output)
except Exception as e:
Tracer.pop(None, e)
raise
new_f = decorated_tool
else:
@functools.wraps(func)
def decorated_tool(*args, **kwargs):
from promptflow._core.tracer import Tracer
if Tracer.active_instance() is None:
return call_func(func, args, kwargs)
try:
Tracer.push(create_trace(func, args, kwargs))
output = call_func(func, args, kwargs)
return Tracer.pop(output)
except Exception as e:
Tracer.pop(None, e)
raise
new_f = decorated_tool
if type is not None and type not in [k.value for k in ToolType]:
raise UserErrorException(f"Tool type {type} is not supported yet.")
new_f.__original_function = func
new_f.__tool = None # This will be set when generating the tool definition.
new_f.__name = name
new_f.__description = description
new_f.__type = type
new_f.__input_settings = input_settings
new_f.__extra_info = kwargs_mock
if streaming_option_parameter and isinstance(streaming_option_parameter, str):
setattr(new_f, STREAMING_OPTION_PARAMETER_ATTR, streaming_option_parameter)
return new_f
# tool replacements.
if func is not None:
if not _replace_tool_rule(func):
return original_tool(
func,
*args_mock,
name=name,
description=description,
type=type,
input_settings=input_settings,
**kwargs_mock,
)
return tool_decorator(func)
return original_tool( # no recording for @tool(name="func_name")
func,
*args_mock,
name=name,
description=description,
type=type,
input_settings=input_settings,
**kwargs_mock,
)
return tool
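# Hedged sketch of the record/replay branch that call_func implements above, with
# RecordStorage replaced by a plain dict (a hypothetical stand-in):
def _demo_call_func(func, kwargs, cache, replaying):
    key = (func.__qualname__, tuple(sorted(kwargs.items())))
    if replaying:
        return cache[key]  # replay: the item must already be recorded
    if key not in cache:
        cache[key] = func(**kwargs)  # record: run once, then cache the result
    return cache[key]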
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/recording_utilities/constants.py | ENVIRON_TEST_MODE = "PROMPT_FLOW_TEST_MODE"
class RecordMode:
LIVE = "live"
RECORD = "record"
REPLAY = "replay"
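# Example (illustrative): select the test mode for a session before tests run.
#
#   import os
#   os.environ[ENVIRON_TEST_MODE] = RecordMode.REPLAY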
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_flow_as_func.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import shutil
from pathlib import Path
from tempfile import TemporaryDirectory
from types import GeneratorType
import pytest
from promptflow import load_flow
from promptflow._sdk._errors import ConnectionNotFoundError, InvalidFlowError
from promptflow._sdk.entities import CustomConnection
from promptflow._sdk.operations._flow_context_resolver import FlowContextResolver
from promptflow._utils.flow_utils import dump_flow_dag, load_flow_dag
from promptflow.entities import FlowContext
from promptflow.exceptions import UserErrorException
FLOWS_DIR = "./tests/test_configs/flows"
RUNS_DIR = "./tests/test_configs/runs"
DATAS_DIR = "./tests/test_configs/datas"
@pytest.mark.usefixtures(
"use_secrets_config_file", "recording_injection", "setup_local_connection", "install_custom_tool_pkg"
)
@pytest.mark.sdk_test
@pytest.mark.e2etest
class TestFlowAsFunc:
def test_flow_as_a_func(self):
f = load_flow(f"{FLOWS_DIR}/print_env_var")
result = f(key="unknown")
assert result["output"] is None
assert "line_number" not in result
def test_flow_as_a_func_with_connection_overwrite(self):
from promptflow._sdk._errors import ConnectionNotFoundError
f = load_flow(f"{FLOWS_DIR}/web_classification")
f.context.connections = {"classify_with_llm": {"connection": "not_exist"}}
with pytest.raises(ConnectionNotFoundError) as e:
f(url="https://www.youtube.com/watch?v=o5ZQyXaAv1g")
assert "Connection 'not_exist' is not found" in str(e.value)
def test_flow_as_a_func_with_connection_obj(self):
f = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
f.context.connections = {"hello_node": {"connection": CustomConnection(secrets={"k": "v"})}}
result = f(text="hello")
assert result["output"]["secrets"] == {"k": "v"}
def test_overrides(self):
f = load_flow(f"{FLOWS_DIR}/print_env_var")
f.context = FlowContext(
# node print_env will take "provided_key" instead of flow input
overrides={"nodes.print_env.inputs.key": "provided_key"},
)
# the key="unknown" will not take effect
result = f(key="unknown")
assert result["output"] is None
@pytest.mark.skip(reason="This experience has not finalized yet.")
def test_flow_as_a_func_with_token_based_connection(self):
class MyCustomConnection(CustomConnection):
def get_token(self):
return "fake_token"
f = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
f.context.connections = {"hello_node": {"connection": MyCustomConnection(secrets={"k": "v"})}}
result = f(text="hello")
assert result == {}
def test_exception_handle(self):
f = load_flow(f"{FLOWS_DIR}/flow_with_invalid_import")
with pytest.raises(UserErrorException) as e:
f(text="hello")
assert "Failed to load python module " in str(e.value)
f = load_flow(f"{FLOWS_DIR}/print_env_var")
with pytest.raises(UserErrorException) as e:
f()
assert "Required input fields ['key'] are missing" in str(e.value)
def test_stream_output(self):
f = load_flow(f"{FLOWS_DIR}/chat_flow_with_python_node_streaming_output")
f.context.streaming = True
result = f(
chat_history=[
{"inputs": {"chat_input": "Hi"}, "outputs": {"chat_output": "Hello! How can I assist you today?"}}
],
question="How are you?",
)
assert isinstance(result["answer"], GeneratorType)
@pytest.mark.skip(reason="This experience has not finalized yet.")
def test_environment_variables(self):
f = load_flow(f"{FLOWS_DIR}/print_env_var")
f.context.environment_variables = {"key": "value"}
result = f(key="key")
assert result["output"] == "value"
def test_flow_as_a_func_with_variant(self):
flow_path = Path(f"{FLOWS_DIR}/flow_with_dict_input_with_variant").absolute()
f = load_flow(
flow_path,
)
f.context.variant = "${print_val.variant1}"
# variant1 will use a mock_custom_connection
with pytest.raises(ConnectionNotFoundError) as e:
f(key="a")
assert "Connection 'mock_custom_connection' is not found." in str(e.value)
# non-exist variant
f.context.variant = "${print_val.variant_2}"
with pytest.raises(InvalidFlowError) as e:
f(key="a")
assert "Variant variant_2 not found for node print_val" in str(e.value)
def test_non_scrubbed_connection(self):
f = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
f.context.connections = {"hello_node": {"connection": CustomConnection(secrets={"k": "*****"})}}
with pytest.raises(UserErrorException) as e:
f(text="hello")
assert "please make sure connection has decrypted secrets to use in flow execution." in str(e)
def test_local_connection_object(self, pf, azure_open_ai_connection):
f = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
# local connection without secret will lead to error
connection = pf.connections.get("azure_open_ai_connection", with_secrets=False)
f.context.connections = {"hello_node": {"connection": connection}}
with pytest.raises(UserErrorException) as e:
f(text="hello")
assert "please make sure connection has decrypted secrets to use in flow execution." in str(e)
def test_non_secret_connection(self):
f = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
# execute connection without secrets won't get error since the connection doesn't have scrubbed secrets
# we only raise error when there are scrubbed secrets in connection
f.context.connections = {"hello_node": {"connection": CustomConnection(secrets={})}}
f(text="hello")
def test_flow_context_cache(self):
# same flow context has same hash
assert hash(FlowContext()) == hash(FlowContext())
# getting executor for same flow will hit cache
flow1 = load_flow(f"{FLOWS_DIR}/print_env_var")
flow2 = load_flow(f"{FLOWS_DIR}/print_env_var")
flow_executor1 = FlowContextResolver.resolve(
flow=flow1,
)
flow_executor2 = FlowContextResolver.resolve(
flow=flow2,
)
assert flow_executor1 is flow_executor2
# getting executor for same flow + context will hit cache
flow1 = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
flow1.context = FlowContext(connections={"hello_node": {"connection": CustomConnection(secrets={"k": "v"})}})
flow2 = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
flow2.context = FlowContext(connections={"hello_node": {"connection": CustomConnection(secrets={"k": "v"})}})
flow_executor1 = FlowContextResolver.resolve(
flow=flow1,
)
flow_executor2 = FlowContextResolver.resolve(
flow=flow2,
)
assert flow_executor1 is flow_executor2
flow1 = load_flow(f"{FLOWS_DIR}/flow_with_dict_input_with_variant")
flow1.context = FlowContext(
variant="${print_val.variant1}",
connections={"print_val": {"conn": CustomConnection(secrets={"k": "v"})}},
overrides={"nodes.print_val.inputs.key": "a"},
)
flow2 = load_flow(f"{FLOWS_DIR}/flow_with_dict_input_with_variant")
flow2.context = FlowContext(
variant="${print_val.variant1}",
connections={"print_val": {"conn": CustomConnection(secrets={"k": "v"})}},
overrides={"nodes.print_val.inputs.key": "a"},
)
flow_executor1 = FlowContextResolver.resolve(flow=flow1)
flow_executor2 = FlowContextResolver.resolve(flow=flow2)
assert flow_executor1 is flow_executor2
def test_flow_cache_not_hit(self):
with TemporaryDirectory() as tmp_dir:
shutil.copytree(f"{FLOWS_DIR}/print_env_var", f"{tmp_dir}/print_env_var")
flow_path = Path(f"{tmp_dir}/print_env_var")
# load same file with different content will not hit cache
flow1 = load_flow(flow_path)
# update content
_, flow_dag = load_flow_dag(flow_path)
flow_dag["inputs"] = {"key": {"type": "string", "default": "key1"}}
dump_flow_dag(flow_dag, flow_path)
flow2 = load_flow(f"{tmp_dir}/print_env_var")
flow_executor1 = FlowContextResolver.resolve(
flow=flow1,
)
flow_executor2 = FlowContextResolver.resolve(
flow=flow2,
)
assert flow_executor1 is not flow_executor2
def test_flow_context_cache_not_hit(self):
flow1 = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
flow1.context = FlowContext(connections={"hello_node": {"connection": CustomConnection(secrets={"k": "v"})}})
flow2 = load_flow(f"{FLOWS_DIR}/flow_with_custom_connection")
flow2.context = FlowContext(connections={"hello_node": {"connection": CustomConnection(secrets={"k2": "v"})}})
flow_executor1 = FlowContextResolver.resolve(
flow=flow1,
)
flow_executor2 = FlowContextResolver.resolve(
flow=flow2,
)
assert flow_executor1 is not flow_executor2
flow1 = load_flow(f"{FLOWS_DIR}/flow_with_dict_input_with_variant")
flow1.context = FlowContext(
variant="${print_val.variant1}",
connections={"print_val": {"conn": CustomConnection(secrets={"k": "v"})}},
overrides={"nodes.print_val.inputs.key": "a"},
)
flow2 = load_flow(f"{FLOWS_DIR}/flow_with_dict_input_with_variant")
flow2.context = FlowContext(
variant="${print_val.variant1}",
connections={"print_val": {"conn": CustomConnection(secrets={"k": "v"})}},
overrides={"nodes.print_val.inputs.key": "b"},
)
flow_executor1 = FlowContextResolver.resolve(flow=flow1)
flow_executor2 = FlowContextResolver.resolve(flow=flow2)
assert flow_executor1 is not flow_executor2
@pytest.mark.timeout(10)
def test_flow_as_func_perf_test(self):
# this test should not take long due to caching logic
f = load_flow(f"{FLOWS_DIR}/print_env_var")
for i in range(100):
f(key="key")
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_connection.py | import uuid
from pathlib import Path
import pydash
import pytest
from promptflow._sdk._constants import SCRUBBED_VALUE
from promptflow._sdk._pf_client import PFClient
from promptflow._sdk.entities import AzureOpenAIConnection, CustomConnection
_client = PFClient()
TEST_ROOT = Path(__file__).parent.parent.parent
CONNECTION_ROOT = TEST_ROOT / "test_configs/connections"
@pytest.mark.cli_test
@pytest.mark.e2etest
class TestConnection:
def test_connection_operations(self):
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = AzureOpenAIConnection(name=name, api_key="test", api_base="test")
# Create
_client.connections.create_or_update(conn)
# Get
result = _client.connections.get(name)
assert pydash.omit(result._to_dict(), ["created_date", "last_modified_date", "name"]) == {
"module": "promptflow.connections",
"type": "azure_open_ai",
"api_key": "******",
"api_base": "test",
"api_type": "azure",
"api_version": "2023-07-01-preview",
}
# Update
conn.api_base = "test2"
result = _client.connections.create_or_update(conn)
assert pydash.omit(result._to_dict(), ["created_date", "last_modified_date", "name"]) == {
"module": "promptflow.connections",
"type": "azure_open_ai",
"api_key": "******",
"api_base": "test2",
"api_type": "azure",
"api_version": "2023-07-01-preview",
}
# List
result = _client.connections.list()
assert len(result) > 0
# Delete
_client.connections.delete(name)
with pytest.raises(Exception) as e:
_client.connections.get(name)
assert "is not found." in str(e.value)
def test_connection_get_and_update(self):
# Test api key not updated
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = AzureOpenAIConnection(name=name, api_key="test", api_base="test")
result = _client.connections.create_or_update(conn)
assert result.api_key == SCRUBBED_VALUE
        # Update api_base only; assert no exception.
result.api_base = "test2"
result = _client.connections.create_or_update(result)
assert result._to_dict()["api_base"] == "test2"
# Assert value not scrubbed
assert result._secrets["api_key"] == "test"
_client.connections.delete(name)
# Invalid update
with pytest.raises(Exception) as e:
result._secrets = {}
_client.connections.create_or_update(result)
assert "secrets ['api_key'] value invalid, please fill them" in str(e.value)
def test_custom_connection_get_and_update(self):
# Test api key not updated
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = CustomConnection(name=name, secrets={"api_key": "test"}, configs={"api_base": "test"})
result = _client.connections.create_or_update(conn)
assert result.secrets["api_key"] == SCRUBBED_VALUE
        # Update api_base only; assert no exception.
result.configs["api_base"] = "test2"
result = _client.connections.create_or_update(result)
assert result._to_dict()["configs"]["api_base"] == "test2"
# Assert value not scrubbed
assert result._secrets["api_key"] == "test"
_client.connections.delete(name)
# Invalid update
with pytest.raises(Exception) as e:
result._secrets = {}
_client.connections.create_or_update(result)
assert "secrets ['api_key'] value invalid, please fill them" in str(e.value)
@pytest.mark.parametrize(
"file_name, expected_updated_item, expected_secret_item",
[
("azure_openai_connection.yaml", ("api_base", "new_value"), ("api_key", "<to-be-replaced>")),
("custom_connection.yaml", ("key1", "new_value"), ("key2", "test2")),
],
)
def test_upsert_connection_from_file(self, file_name, expected_updated_item, expected_secret_item):
from promptflow._cli._pf._connection import _upsert_connection_from_file
name = f"Connection_{str(uuid.uuid4())[:4]}"
result = _upsert_connection_from_file(file=CONNECTION_ROOT / file_name, params_override=[{"name": name}])
assert result is not None
update_file_name = f"update_{file_name}"
result = _upsert_connection_from_file(file=CONNECTION_ROOT / update_file_name, params_override=[{"name": name}])
# Test secrets not updated, and configs updated
assert (
result.configs[expected_updated_item[0]] == expected_updated_item[1]
), "Assert configs updated failed, expected: {}, actual: {}".format(
expected_updated_item[1], result.configs[expected_updated_item[0]]
)
assert (
result._secrets[expected_secret_item[0]] == expected_secret_item[1]
), "Assert secrets not updated failed, expected: {}, actual: {}".format(
expected_secret_item[1], result._secrets[expected_secret_item[0]]
)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_flow_test.py | import logging
import tempfile
from pathlib import Path
from types import GeneratorType
import papermill
import pytest
from marshmallow import ValidationError
from promptflow._sdk._constants import LOGGER_NAME
from promptflow._sdk._pf_client import PFClient
from promptflow.exceptions import UserErrorException
PROMOTFLOW_ROOT = Path(__file__) / "../../../.."
TEST_ROOT = Path(__file__).parent.parent.parent
MODEL_ROOT = TEST_ROOT / "test_configs/e2e_samples"
CONNECTION_FILE = (PROMOTFLOW_ROOT / "connections.json").resolve().absolute().as_posix()
FLOWS_DIR = (TEST_ROOT / "test_configs/flows").resolve().absolute().as_posix()
EAGER_FLOWS_DIR = (TEST_ROOT / "test_configs/eager_flows").resolve().absolute().as_posix()
FLOW_RESULT_KEYS = ["category", "evidence"]
_client = PFClient()
@pytest.mark.usefixtures(
"use_secrets_config_file", "recording_injection", "setup_local_connection", "install_custom_tool_pkg"
)
@pytest.mark.sdk_test
@pytest.mark.e2etest
class TestFlowTest:
def test_pf_test_flow(self):
inputs = {"url": "https://www.youtube.com/watch?v=o5ZQyXaAv1g", "answer": "Channel", "evidence": "Url"}
flow_path = Path(f"{FLOWS_DIR}/web_classification").absolute()
result = _client.test(flow=flow_path, inputs=inputs)
assert all([key in FLOW_RESULT_KEYS for key in result])
result = _client.test(flow=f"{FLOWS_DIR}/web_classification")
assert all([key in FLOW_RESULT_KEYS for key in result])
def test_pf_test_flow_with_package_tool_with_custom_strong_type_connection(self, install_custom_tool_pkg):
inputs = {"text": "Hello World!"}
flow_path = Path(f"{FLOWS_DIR}/flow_with_package_tool_with_custom_strong_type_connection").absolute()
# Test that connection would be custom strong type in flow
result = _client.test(flow=flow_path, inputs=inputs)
assert result == {"out": "connection_value is MyFirstConnection: True"}
# Test node run
result = _client.test(flow=flow_path, inputs={"input_text": "Hello World!"}, node="My_Second_Tool_usi3")
assert result == "Hello World!This is my first custom connection."
def test_pf_test_flow_with_package_tool_with_custom_connection_as_input_value(self, install_custom_tool_pkg):
# Prepare custom connection
from promptflow.connections import CustomConnection
conn = CustomConnection(name="custom_connection_3", secrets={"api_key": "test"}, configs={"api_base": "test"})
_client.connections.create_or_update(conn)
inputs = {"text": "Hello World!"}
flow_path = Path(f"{FLOWS_DIR}/flow_with_package_tool_with_custom_connection").absolute()
# Test that connection would be custom strong type in flow
result = _client.test(flow=flow_path, inputs=inputs)
assert result == {"out": "connection_value is MyFirstConnection: True"}
def test_pf_test_flow_with_script_tool_with_custom_strong_type_connection(self):
# Prepare custom connection
from promptflow.connections import CustomConnection
conn = CustomConnection(name="custom_connection_2", secrets={"api_key": "test"}, configs={"api_url": "test"})
_client.connections.create_or_update(conn)
inputs = {"text": "Hello World!"}
flow_path = Path(f"{FLOWS_DIR}/flow_with_script_tool_with_custom_strong_type_connection").absolute()
# Test that connection would be custom strong type in flow
result = _client.test(flow=flow_path, inputs=inputs)
assert result == {"out": "connection_value is MyCustomConnection: True"}
# Test node run
result = _client.test(flow=flow_path, inputs={"input_param": "Hello World!"}, node="my_script_tool")
assert result == "connection_value is MyCustomConnection: True"
def test_pf_test_with_streaming_output(self):
flow_path = Path(f"{FLOWS_DIR}/chat_flow_with_stream_output")
result = _client.test(flow=flow_path)
chat_output = result["answer"]
assert isinstance(chat_output, GeneratorType)
assert "".join(chat_output)
flow_path = Path(f"{FLOWS_DIR}/basic_with_builtin_llm_node")
result = _client.test(flow=flow_path)
chat_output = result["output"]
assert isinstance(chat_output, str)
def test_pf_test_node(self):
inputs = {"classify_with_llm.output": '{"category": "App", "evidence": "URL"}'}
flow_path = Path(f"{FLOWS_DIR}/web_classification").absolute()
result = _client.test(flow=flow_path, inputs=inputs, node="convert_to_dict")
assert all([key in FLOW_RESULT_KEYS for key in result])
def test_pf_test_flow_with_variant(self):
inputs = {"url": "https://www.youtube.com/watch?v=o5ZQyXaAv1g", "answer": "Channel", "evidence": "Url"}
result = _client.test(
flow=f"{FLOWS_DIR}/web_classification", inputs=inputs, variant="${summarize_text_content.variant_1}"
)
assert all([key in FLOW_RESULT_KEYS for key in result])
@pytest.mark.skip("TODO this test case failed in windows and Mac")
def test_pf_test_with_additional_includes(self, caplog):
from promptflow import VERSION
print(VERSION)
with caplog.at_level(level=logging.WARNING, logger=LOGGER_NAME):
inputs = {"url": "https://www.youtube.com/watch?v=o5ZQyXaAv1g", "answer": "Channel", "evidence": "Url"}
result = _client.test(flow=f"{FLOWS_DIR}/web_classification_with_additional_include", inputs=inputs)
duplicate_file_content = "Found duplicate file in additional includes"
assert any([duplicate_file_content in record.message for record in caplog.records])
assert all([key in FLOW_RESULT_KEYS for key in result])
inputs = {"classify_with_llm.output": '{"category": "App", "evidence": "URL"}'}
result = _client.test(flow=f"{FLOWS_DIR}/web_classification", inputs=inputs, node="convert_to_dict")
assert all([key in FLOW_RESULT_KEYS for key in result])
# Test additional includes don't exist
with pytest.raises(UserErrorException) as e:
_client.test(flow=f"{FLOWS_DIR}/web_classification_with_invalid_additional_include")
assert "Unable to find additional include ../invalid/file/path" in str(e.value)
def test_pf_flow_test_with_symbolic(self, prepare_symbolic_flow):
inputs = {"url": "https://www.youtube.com/watch?v=o5ZQyXaAv1g", "answer": "Channel", "evidence": "Url"}
result = _client.test(flow=f"{FLOWS_DIR}/web_classification_with_additional_include", inputs=inputs)
assert all([key in FLOW_RESULT_KEYS for key in result])
inputs = {"classify_with_llm.output": '{"category": "App", "evidence": "URL"}'}
result = _client.test(flow=f"{FLOWS_DIR}/web_classification", inputs=inputs, node="convert_to_dict")
assert all([key in FLOW_RESULT_KEYS for key in result])
def test_pf_flow_test_with_exception(self, capsys):
# Test flow with exception
inputs = {"url": "https://www.youtube.com/watch?v=o5ZQyXaAv1g", "answer": "Channel", "evidence": "Url"}
flow_path = Path(f"{FLOWS_DIR}/web_classification_with_exception").absolute()
with pytest.raises(UserErrorException) as exception:
_client.test(flow=flow_path, inputs=inputs)
assert "Execution failure in 'convert_to_dict': (Exception) mock exception" in str(exception.value)
# Test node with exception
inputs = {"classify_with_llm.output": '{"category": "App", "evidence": "URL"}'}
with pytest.raises(Exception) as exception:
_client.test(flow=flow_path, inputs=inputs, node="convert_to_dict")
output = capsys.readouterr()
assert "convert_to_dict.py" in output.out
assert "mock exception" in str(exception.value)
def test_node_test_with_connection_input(self):
flow_path = Path(f"{FLOWS_DIR}/basic-with-connection").absolute()
inputs = {
"connection": "azure_open_ai_connection",
"hello_prompt.output": "system:\n Your task is to write python program for me\nuser:\n"
"Write a simple Hello World! program that displays "
"the greeting message when executed.",
}
result = _client.test(
flow=flow_path,
inputs=inputs,
node="echo_my_prompt",
environment_variables={"API_TYPE": "${azure_open_ai_connection.api_type}"},
)
assert result
def test_pf_flow_with_aggregation(self):
flow_path = Path(f"{FLOWS_DIR}/classification_accuracy_evaluation").absolute()
inputs = {"variant_id": "variant_0", "groundtruth": "Pdf", "prediction": "PDF"}
result = _client._flows._test(flow=flow_path, inputs=inputs)
assert "calculate_accuracy" in result.node_run_infos
assert result.run_info.metrics == {"accuracy": 1.0}
def test_generate_tool_meta_in_additional_folder(self):
flow_path = Path(f"{FLOWS_DIR}/web_classification_with_additional_include").absolute()
flow_tools, _ = _client._flows._generate_tools_meta(flow=flow_path)
for tool in flow_tools["code"].values():
assert (Path(flow_path) / tool["source"]).exists()
def test_pf_test_with_non_english_input(self):
result = _client.test(flow=f"{FLOWS_DIR}/flow_with_non_english_input")
assert result["output"] == "Hello 日本語"
def test_pf_node_test_with_dict_input(self):
flow_path = Path(f"{FLOWS_DIR}/flow_with_dict_input").absolute()
flow_inputs = {"key": {"input_key": "input_value"}}
result = _client._flows._test(flow=flow_path, inputs=flow_inputs)
assert result.run_info.status.value == "Completed"
inputs = {
"get_dict_val.output.value": result.node_run_infos["get_dict_val"].output,
"get_dict_val.output.origin_value": result.node_run_infos["get_dict_val"].output,
}
node_result = _client._flows._test(flow=flow_path, node="print_val", inputs=inputs)
assert node_result.status.value == "Completed"
inputs = {
"val": result.node_run_infos["get_dict_val"].output,
"origin_val": result.node_run_infos["get_dict_val"].output
}
node_result = _client._flows._test(flow=flow_path, node="print_val", inputs=inputs)
assert node_result.status.value == "Completed"
def test_pf_node_test_with_node_ref(self):
flow_path = Path(f"{FLOWS_DIR}/flow_with_dict_input").absolute()
flow_inputs = {"key": {"input_key": "input_value"}}
result = _client._flows._test(flow=flow_path, inputs=flow_inputs)
assert result.run_info.status.value == "Completed"
# Test node ref with reference node output names
inputs = {
"get_dict_val.output.value": result.node_run_infos["get_dict_val"].output["value"],
"get_dict_val.output.origin_value": result.node_run_infos["get_dict_val"].output["origin_value"],
}
ref_result = _client._flows._test(flow=flow_path, node="print_val", inputs=inputs)
assert ref_result.status.value == "Completed"
# Test node ref with testing node input names
inputs = {
"val": result.node_run_infos["get_dict_val"].output["value"],
"origin_val": result.node_run_infos["get_dict_val"].output["origin_value"],
}
variable_result = _client._flows._test(flow=flow_path, node="print_val", inputs=inputs)
assert variable_result.status.value == "Completed"
def test_pf_test_flow_in_notebook(self):
notebook_path = Path(f"{TEST_ROOT}/test_configs/notebooks/dummy.ipynb").absolute()
with tempfile.TemporaryDirectory() as temp_dir:
output_notebook_path = Path(temp_dir) / "output.ipynb"
papermill.execute_notebook(
notebook_path,
output_path=output_notebook_path,
cwd=notebook_path.parent,
)
def test_eager_flow_test(self):
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_without_yaml/entry.py").absolute()
result = _client._flows._test(flow=flow_path, entry="my_flow", inputs={"input_val": "val1"})
assert result.run_info.status.value == "Completed"
def test_eager_flow_test_with_yaml(self):
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_with_yaml/").absolute()
result = _client._flows._test(flow=flow_path, inputs={"input_val": "val1"})
assert result.run_info.status.value == "Completed"
def test_eager_flow_test_with_primitive_output(self):
flow_path = Path(f"{EAGER_FLOWS_DIR}/primitive_output/").absolute()
result = _client._flows._test(flow=flow_path, inputs={"input_val": "val1"})
assert result.run_info.status.value == "Completed"
def test_eager_flow_test_invalid_cases(self):
# no entry provided
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_without_yaml/entry.py").absolute()
with pytest.raises(UserErrorException) as e:
_client._flows._test(flow=flow_path, inputs={"input_val": "val1"})
assert "Entry function is not specified" in str(e.value)
# no path provided
flow_path = Path(f"{EAGER_FLOWS_DIR}/invalid_no_path/").absolute()
with pytest.raises(ValidationError) as e:
_client._flows._test(flow=flow_path, inputs={"input_val": "val1"})
assert "'path': ['Missing data for required field.']" in str(e.value)
# dup entries provided
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_with_yaml/").absolute()
with pytest.raises(UserErrorException) as e:
_client._flows._test(flow=flow_path, entry="my_flow", inputs={"input_val": "val1"})
assert "Specifying entry function is not allowed" in str(e.value)
# wrong entry provided
# required inputs not provided
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_flow_serve_azureml_extension.py | import json
import pytest
@pytest.mark.usefixtures("recording_injection")
@pytest.mark.e2etest
def test_azureml_serving_api_with_encoded_connection(flow_serving_client_with_encoded_connection):
response = flow_serving_client_with_encoded_connection.get("/health")
assert b'{"status":"Healthy","version":"0.0.1"}' in response.data
response = flow_serving_client_with_encoded_connection.post("/score", data=json.dumps({"text": "hi"}))
assert (
response.status_code == 200
), f"Response code indicates error {response.status_code} - {response.data.decode()}"
assert "output_prompt" in json.loads(response.data.decode())
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_custom_strong_type_connection.py | import uuid
from pathlib import Path
import pydash
import pytest
from promptflow._sdk._constants import SCRUBBED_VALUE, CustomStrongTypeConnectionConfigs
from promptflow._sdk._pf_client import PFClient
from promptflow._sdk.entities import CustomStrongTypeConnection
from promptflow.contracts.types import Secret
class MyCustomConnection(CustomStrongTypeConnection):
api_key: Secret
api_base: str
_client = PFClient()
TEST_ROOT = Path(__file__).parent.parent.parent
CONNECTION_ROOT = TEST_ROOT / "test_configs/connections"
@pytest.mark.cli_test
@pytest.mark.e2etest
class TestCustomStrongTypeConnection:
def test_connection_operations(self):
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = MyCustomConnection(name=name, secrets={"api_key": "test"}, configs={"api_base": "test"})
# Create
_client.connections.create_or_update(conn)
# Get
result = _client.connections.get(name)
assert pydash.omit(result._to_dict(), ["created_date", "last_modified_date", "name"]) == {
"module": "promptflow.connections",
"type": "custom",
"configs": {
"api_base": "test",
"promptflow.connection.custom_type": "MyCustomConnection",
"promptflow.connection.module": "sdk_cli_test.e2etests.test_custom_strong_type_connection",
},
"secrets": {"api_key": "******"},
}
# Update
conn.configs["api_base"] = "test2"
result = _client.connections.create_or_update(conn)
assert pydash.omit(result._to_dict(), ["created_date", "last_modified_date", "name"]) == {
"module": "promptflow.connections",
"type": "custom",
"configs": {
"api_base": "test2",
"promptflow.connection.custom_type": "MyCustomConnection",
"promptflow.connection.module": "sdk_cli_test.e2etests.test_custom_strong_type_connection",
},
"secrets": {"api_key": "******"},
}
# List
result = _client.connections.list()
assert len(result) > 0
# Delete
_client.connections.delete(name)
with pytest.raises(Exception) as e:
_client.connections.get(name)
assert "is not found." in str(e.value)
def test_connection_update(self):
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = MyCustomConnection(name=name, secrets={"api_key": "test"}, configs={"api_base": "test"})
# Create
_client.connections.create_or_update(conn)
# Get
custom_conn = _client.connections.get(name)
assert pydash.omit(custom_conn._to_dict(), ["created_date", "last_modified_date", "name"]) == {
"module": "promptflow.connections",
"type": "custom",
"configs": {
"api_base": "test",
"promptflow.connection.custom_type": "MyCustomConnection",
"promptflow.connection.module": "sdk_cli_test.e2etests.test_custom_strong_type_connection",
},
"secrets": {"api_key": "******"},
}
# Update
custom_conn.configs["api_base"] = "test2"
result = _client.connections.create_or_update(custom_conn)
assert pydash.omit(result._to_dict(), ["created_date", "last_modified_date", "name"]) == {
"module": "promptflow.connections",
"type": "custom",
"configs": {
"api_base": "test2",
"promptflow.connection.custom_type": "MyCustomConnection",
"promptflow.connection.module": "sdk_cli_test.e2etests.test_custom_strong_type_connection",
},
"secrets": {"api_key": "******"},
}
# List
result = _client.connections.list()
assert len(result) > 0
# Delete
_client.connections.delete(name)
with pytest.raises(Exception) as e:
_client.connections.get(name)
assert "is not found." in str(e.value)
def test_connection_get_and_update(self):
# Test api key not updated
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = MyCustomConnection(name=name, secrets={"api_key": "test"}, configs={"api_base": "test"})
result = _client.connections.create_or_update(conn)
assert result.secrets["api_key"] == SCRUBBED_VALUE
        # Update api_base only; assert no exception.
result.configs["api_base"] = "test2"
result = _client.connections.create_or_update(result)
assert result._to_dict()["configs"]["api_base"] == "test2"
# Assert value not scrubbed
assert result._secrets["api_key"] == "test"
_client.connections.delete(name)
# Invalid update
with pytest.raises(Exception) as e:
result._secrets = {}
_client.connections.create_or_update(result)
assert "secrets ['api_key'] value invalid, please fill them" in str(e.value)
def test_connection_get_and_update_with_key(self):
# Test api key not updated
name = f"Connection_{str(uuid.uuid4())[:4]}"
conn = MyCustomConnection(name=name, secrets={"api_key": "test"}, configs={"api_base": "test"})
assert conn.api_base == "test"
assert conn.configs["api_base"] == "test"
result = _client.connections.create_or_update(conn)
converted_conn = result._convert_to_custom_strong_type(
module=__class__.__module__, to_class="MyCustomConnection"
)
assert isinstance(converted_conn, MyCustomConnection)
assert converted_conn.api_base == "test"
converted_conn.api_base = "test2"
assert converted_conn.api_base == "test2"
assert converted_conn.configs["api_base"] == "test2"
@pytest.mark.parametrize(
"file_name, expected_updated_item, expected_secret_item",
[
("custom_strong_type_connection.yaml", ("api_base", "new_value"), ("api_key", "<to-be-replaced>")),
],
)
def test_upsert_connection_from_file(
self, install_custom_tool_pkg, file_name, expected_updated_item, expected_secret_item
):
from promptflow._cli._pf._connection import _upsert_connection_from_file
name = f"Connection_{str(uuid.uuid4())[:4]}"
result = _upsert_connection_from_file(file=CONNECTION_ROOT / file_name, params_override=[{"name": name}])
assert result is not None
assert result.configs[CustomStrongTypeConnectionConfigs.PROMPTFLOW_MODULE_KEY] == "my_tool_package.connections"
update_file_name = f"update_{file_name}"
result = _upsert_connection_from_file(file=CONNECTION_ROOT / update_file_name, params_override=[{"name": name}])
# Test secrets not updated, and configs updated
assert (
result.configs[expected_updated_item[0]] == expected_updated_item[1]
), "Assert configs updated failed, expected: {}, actual: {}".format(
expected_updated_item[1], result.configs[expected_updated_item[0]]
)
assert (
result._secrets[expected_secret_item[0]] == expected_secret_item[1]
), "Assert secrets not updated failed, expected: {}, actual: {}".format(
expected_secret_item[1], result._secrets[expected_secret_item[0]]
)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_cli_perf.py | import contextlib
import io
import multiprocessing
import os
import sys
import tempfile
import timeit
import uuid
from pathlib import Path
from unittest import mock
import pytest
from promptflow._cli._user_agent import USER_AGENT as CLI_USER_AGENT # noqa: E402
from promptflow._sdk._telemetry import log_activity
from promptflow._sdk._utils import ClientUserAgentUtil
FLOWS_DIR = "./tests/test_configs/flows"
CONNECTIONS_DIR = "./tests/test_configs/connections"
DATAS_DIR = "./tests/test_configs/datas"
def mock_log_activity(*args, **kwargs):
custom_message = "github run: https://github.com/microsoft/promptflow/actions/runs/{0}".format(
os.environ.get("GITHUB_RUN_ID")
)
if len(args) == 4:
if args[3] is not None:
args[3]["custom_message"] = custom_message
else:
args = list(args)
args[3] = {"custom_message": custom_message}
elif "custom_dimensions" in kwargs and kwargs["custom_dimensions"] is not None:
kwargs["custom_dimensions"]["custom_message"] = custom_message
else:
kwargs["custom_dimensions"] = {"custom_message": custom_message}
return log_activity(*args, **kwargs)
def run_cli_command(cmd, time_limit=3600, result_queue=None):
from promptflow._cli._pf.entry import main
sys.argv = list(cmd)
output = io.StringIO()
st = timeit.default_timer()
with contextlib.redirect_stdout(output), mock.patch.object(
ClientUserAgentUtil, "get_user_agent"
) as get_user_agent_fun, mock.patch(
"promptflow._sdk._telemetry.activity.log_activity", side_effect=mock_log_activity
), mock.patch(
"promptflow._cli._pf.entry.log_activity", side_effect=mock_log_activity
):
# The client side modifies the user agent only through ClientUserAgentUtil, to avoid impacting the executor/runtime.
get_user_agent_fun.return_value = f"{CLI_USER_AGENT} perf_monitor/1.0"
user_agent = ClientUserAgentUtil.get_user_agent()
assert user_agent == f"{CLI_USER_AGENT} perf_monitor/1.0"
main()
ed = timeit.default_timer()
print(f"{cmd}, \n Total time: {ed - st}s")
assert ed - st < time_limit, f"The time limit is {time_limit}s, but it took {ed - st}s."
res_value = output.getvalue()
if result_queue:
result_queue.put(res_value)
return res_value
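# Usage sketch (hypothetical values): the command is passed as an argv-style tuple, e.g.
#     run_cli_command(cmd=("pf", "connection", "list"), time_limit=10)
# which returns the captured stdout and fails the test if execution exceeds time_limit.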
def subprocess_run_cli_command(cmd, time_limit=3600):
result_queue = multiprocessing.Queue()
process = multiprocessing.Process(
target=run_cli_command, args=(cmd,), kwargs={"time_limit": time_limit, "result_queue": result_queue}
)
process.start()
process.join()
assert process.exitcode == 0
return result_queue.get_nowait()
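# Design note: running the command in a fresh multiprocessing.Process measures cold-start
# cost (interpreter imports included) and keeps module-level state from leaking between
# perf cases; the Queue is the only channel used to return the captured stdout, e.g.
# (hypothetical): out = subprocess_run_cli_command(("pf", "--version"), time_limit=30)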
@pytest.mark.usefixtures("use_secrets_config_file", "setup_local_connection")
@pytest.mark.perf_monitor_test
class TestCliPerf:
def test_pf_run_create(self, time_limit=20) -> None:
res = subprocess_run_cli_command(
cmd=(
"pf",
"run",
"create",
"--flow",
f"{FLOWS_DIR}/print_input_flow",
"--data",
f"{DATAS_DIR}/print_input_flow.jsonl",
),
time_limit=time_limit,
)
assert "Completed" in res
def test_pf_run_update(self, time_limit=10) -> None:
run_name = str(uuid.uuid4())
run_cli_command(
cmd=(
"pf",
"run",
"create",
"--flow",
f"{FLOWS_DIR}/print_input_flow",
"--data",
f"{DATAS_DIR}/print_input_flow.jsonl",
"--name",
run_name,
)
)
res = subprocess_run_cli_command(
cmd=("pf", "run", "update", "--name", run_name, "--set", "description=test pf run update"),
time_limit=time_limit,
)
assert "Completed" in res
def test_pf_flow_test(self, time_limit=10):
subprocess_run_cli_command(
cmd=(
"pf",
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/print_input_flow",
"--inputs",
"text=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
),
time_limit=time_limit,
)
output_path = Path(FLOWS_DIR) / "print_input_flow" / ".promptflow" / "flow.output.json"
assert output_path.exists()
def test_pf_flow_build(self, time_limit=20):
with tempfile.TemporaryDirectory() as temp_dir:
subprocess_run_cli_command(
cmd=(
"pf",
"flow",
"build",
"--source",
f"{FLOWS_DIR}/print_input_flow/flow.dag.yaml",
"--output",
temp_dir,
"--format",
"docker",
),
time_limit=time_limit,
)
def test_pf_connection_create(self, time_limit=10):
name = f"Connection_{str(uuid.uuid4())[:4]}"
res = subprocess_run_cli_command(
cmd=(
"pf",
"connection",
"create",
"--file",
f"{CONNECTIONS_DIR}/azure_openai_connection.yaml",
"--name",
f"{name}",
),
time_limit=time_limit,
)
assert "api_type" in res
def test_pf_connection_list(self, time_limit=10):
name = "connection_list"
res = run_cli_command(
cmd=(
"pf",
"connection",
"create",
"--file",
f"{CONNECTIONS_DIR}/azure_openai_connection.yaml",
"--name",
f"{name}",
)
)
assert "api_type" in res
res = subprocess_run_cli_command(cmd=("pf", "connection", "list"), time_limit=time_limit)
assert "api_type" in res
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_tool.py | import importlib.util
import json
import sys
from pathlib import Path
from unittest.mock import patch
import pytest
from promptflow import ToolProvider, tool
from promptflow._core.tool_meta_generator import ToolValidationError
from promptflow._sdk._pf_client import PFClient
from promptflow.entities import DynamicList, InputSetting
from promptflow.exceptions import UserErrorException
PROMPTFLOW_ROOT = Path(__file__) / "../../../.."
TEST_ROOT = Path(__file__).parent.parent.parent
TOOL_ROOT = TEST_ROOT / "test_configs/tools"
_client = PFClient()
@pytest.mark.e2etest
class TestTool:
def get_tool_meta(self, tool_path):
module_name = f"test_tool.{Path(tool_path).stem}"
# Load the module from the file path
spec = importlib.util.spec_from_file_location(module_name, tool_path)
module = importlib.util.module_from_spec(spec)
# Execute the module's code so its tools register
spec.loader.exec_module(module)
tools_meta, _ = _client.tools._generate_tool_meta(module)
return tools_meta
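# Note: spec_from_file_location + module_from_spec is the standard importlib recipe for
# importing a module from an arbitrary path; exec_module runs the module body so that
# @tool-decorated functions register and _generate_tool_meta can reflect over them.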
def test_python_tool_meta(self):
tool_path = TOOL_ROOT / "python_tool.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.python_tool.PythonTool.python_tool": {
"class_name": "PythonTool",
"function": "python_tool",
"inputs": {"connection": {"type": ["AzureOpenAIConnection"]}, "input1": {"type": ["string"]}},
"module": "test_tool.python_tool",
"name": "PythonTool.python_tool",
"type": "python",
},
"test_tool.python_tool.my_python_tool": {
"function": "my_python_tool",
"inputs": {"input1": {"type": ["string"]}},
"module": "test_tool.python_tool",
"name": "python_tool",
"type": "python",
},
"test_tool.python_tool.my_python_tool_without_name": {
"function": "my_python_tool_without_name",
"inputs": {"input1": {"type": ["string"]}},
"module": "test_tool.python_tool",
"name": "my_python_tool_without_name",
"type": "python",
},
}
assert tool_meta == expect_tool_meta
def test_llm_tool_meta(self):
tool_path = TOOL_ROOT / "custom_llm_tool.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.custom_llm_tool.my_tool": {
"name": "My Custom LLM Tool",
"type": "custom_llm",
"inputs": {"connection": {"type": ["CustomConnection"]}},
"description": "This is a tool to demonstrate the custom_llm tool type",
"module": "test_tool.custom_llm_tool",
"function": "my_tool",
"enable_kwargs": True,
},
"test_tool.custom_llm_tool.TestCustomLLMTool.tool_func": {
"name": "My Custom LLM Tool",
"type": "custom_llm",
"inputs": {"connection": {"type": ["AzureOpenAIConnection"]}, "api": {"type": ["string"]}},
"description": "This is a tool to demonstrate the custom_llm tool type",
"module": "test_tool.custom_llm_tool",
"class_name": "TestCustomLLMTool",
"function": "tool_func",
"enable_kwargs": True,
},
}
assert tool_meta == expect_tool_meta
def test_invalid_tool_type(self):
with pytest.raises(UserErrorException) as exception:
@tool(name="invalid_tool_type", type="invalid_type")
def invalid_tool_type():
pass
assert exception.value.message == "Tool type invalid_type is not supported yet."
def test_tool_with_custom_connection(self):
tool_path = TOOL_ROOT / "tool_with_custom_connection.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.tool_with_custom_connection.MyTool.my_tool": {
"name": "My Second Tool",
"type": "python",
"inputs": {"connection": {"type": ["CustomConnection"]}, "input_text": {"type": ["string"]}},
"description": "This is my second tool",
"module": "test_tool.tool_with_custom_connection",
"class_name": "MyTool",
"function": "my_tool",
}
}
assert tool_meta == expect_tool_meta
tool_path = TOOL_ROOT / "tool_with_custom_strong_type_connection.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.tool_with_custom_strong_type_connection.my_tool": {
"name": "Tool With Custom Strong Type Connection",
"type": "python",
"inputs": {
"connection": {"type": ["CustomConnection"], "custom_type": ["MyCustomConnection"]},
"input_text": {"type": ["string"]},
},
"description": "This is my tool with custom strong type connection.",
"module": "test_tool.tool_with_custom_strong_type_connection",
"function": "my_tool",
}
}
assert tool_meta == expect_tool_meta
def test_tool_with_input_settings(self):
tool_path = TOOL_ROOT / "tool_with_dynamic_list_input.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.tool_with_dynamic_list_input.my_tool": {
"description": "This is my tool with dynamic list input",
"function": "my_tool",
"inputs": {
"endpoint_name": {
"dynamic_list": {
"func_kwargs": [
{
"default": "",
"name": "prefix",
"optional": True,
"reference": "${inputs.input_prefix}",
"type": ["string"],
}
],
"func_path": "test_tool.tool_with_dynamic_list_input.list_endpoint_names",
},
"type": ["string"],
},
"input_prefix": {"type": ["string"]},
"input_text": {
"allow_manual_entry": True,
"dynamic_list": {
"func_kwargs": [
{
"default": "",
"name": "prefix",
"optional": True,
"reference": "${inputs.input_prefix}",
"type": ["string"],
},
{"default": 10, "name": "size", "optional": True, "type": ["int"]},
],
"func_path": "test_tool.tool_with_dynamic_list_input.my_list_func",
},
"is_multi_select": True,
"type": ["list"],
},
},
"module": "test_tool.tool_with_dynamic_list_input",
"name": "My Tool with Dynamic List Input",
"type": "python",
}
}
assert tool_meta == expect_tool_meta
tool_path = TOOL_ROOT / "tool_with_enabled_by_value.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.tool_with_enabled_by_value.my_tool": {
"name": "My Tool with Enabled By Value",
"type": "python",
"inputs": {
"user_type": {"type": ["string"], "enum": ["student", "teacher"]},
"student_id": {"type": ["string"], "enabled_by": "user_type", "enabled_by_value": ["student"]},
"teacher_id": {"type": ["string"], "enabled_by": "user_type", "enabled_by_value": ["teacher"]},
},
"description": "This is my tool with enabled by value",
"module": "test_tool.tool_with_enabled_by_value",
"function": "my_tool",
}
}
assert tool_meta == expect_tool_meta
def test_dynamic_list_with_invalid_reference(self):
def my_list_func(prefix: str, size: int = 10):
pass
# value in reference doesn't exist in tool inputs
invalid_dynamic_list_setting = DynamicList(function=my_list_func, input_mapping={"prefix": "invalid_input"})
input_settings = {
"input_text": InputSetting(
dynamic_list=invalid_dynamic_list_setting, allow_manual_entry=True, is_multi_select=True
)
}
@tool(
name="My Tool with Dynamic List Input",
description="This is my tool with dynamic list input",
input_settings=input_settings,
)
def my_tool(input_text: list, input_prefix: str) -> str:
return f"Hello {input_prefix} {','.join(input_text)}"
with pytest.raises(ToolValidationError) as exception:
_client.tools.validate(my_tool, raise_error=True)
assert "Cannot find invalid_input in the tool inputs." in exception.value.message
# invalid dynamic func input
invalid_dynamic_list_setting = DynamicList(
function=my_list_func, input_mapping={"invalid_input": "input_prefix"}
)
input_settings = {
"input_text": InputSetting(
dynamic_list=invalid_dynamic_list_setting, allow_manual_entry=True, is_multi_select=True
)
}
@tool(
name="My Tool with Dynamic List Input",
description="This is my tool with dynamic list input",
input_settings=input_settings,
)
def my_tool(input_text: list, input_prefix: str) -> str:
return f"Hello {input_prefix} {','.join(input_text)}"
with pytest.raises(ToolValidationError) as exception:
_client.tools.validate(my_tool, raise_error=True)
assert "Cannot find invalid_input in the inputs of dynamic_list func" in exception.value.message
# check required inputs of dynamic list func
invalid_dynamic_list_setting = DynamicList(function=my_list_func, input_mapping={"size": "input_prefix"})
input_settings = {
"input_text": InputSetting(
dynamic_list=invalid_dynamic_list_setting,
)
}
@tool(
name="My Tool with Dynamic List Input",
description="This is my tool with dynamic list input",
input_settings=input_settings,
)
def my_tool(input_text: list, input_prefix: str) -> str:
return f"Hello {input_prefix} {','.join(input_text)}"
with pytest.raises(ToolValidationError) as exception:
_client.tools.validate(my_tool, raise_error=True)
assert "Missing required input(s) of dynamic_list function: ['prefix']" in exception.value.message
def test_enabled_by_with_invalid_input(self):
# the value in enabled_by doesn't exist in the tool inputs
input1_settings = InputSetting(enabled_by="invalid_input")
@tool(name="enabled_by_with_invalid_input", input_settings={"input1": input1_settings})
def enabled_by_with_invalid_input(input1: str, input2: str):
pass
with pytest.raises(ToolValidationError) as exception:
_client.tools.validate(enabled_by_with_invalid_input, raise_error=True)
assert 'Cannot find the input \\"invalid_input\\"' in exception.value.message
def test_tool_with_file_path_input(self):
tool_path = TOOL_ROOT / "tool_with_file_path_input.py"
tool_meta = self.get_tool_meta(tool_path)
expect_tool_meta = {
"test_tool.tool_with_file_path_input.my_tool": {
"name": "Tool with FilePath Input",
"type": "python",
"inputs": {"input_file": {"type": ["file_path"]}, "input_text": {"type": ["string"]}},
"description": "This is a tool to demonstrate the usage of FilePath input",
"module": "test_tool.tool_with_file_path_input",
"function": "my_tool",
}
}
assert expect_tool_meta == tool_meta
def test_tool_with_generated_by_input(self):
tool_path = TOOL_ROOT / "tool_with_generated_by_input.py"
tool_meta = self.get_tool_meta(tool_path)
with open(TOOL_ROOT / "expected_generated_by_meta.json", "r") as f:
expect_tool_meta = json.load(f)
assert expect_tool_meta == tool_meta
def test_validate_tool_script(self):
tool_script_path = TOOL_ROOT / "custom_llm_tool.py"
result = _client.tools.validate(tool_script_path)
assert result.passed
tool_script_path = TOOL_ROOT / "tool_with_dynamic_list_input.py"
result = _client.tools.validate(tool_script_path)
assert result.passed
tool_script_path = TOOL_ROOT / "invalid_tool.py"
result = _client.tools.validate(tool_script_path)
assert len(result._errors) == 4
assert "1 is not of type 'string'" in result.error_messages["invalid_schema_type"]
assert (
"Cannot provide both `icon` and `icon_light` or `icon_dark`." in result.error_messages["invalid_tool_icon"]
)
assert (
'Cannot find the input "invalid_input" for the enabled_by of teacher_id.'
in result.error_messages["invalid_input_settings"]
)
assert (
'Cannot find the input "invalid_input" for the enabled_by of student_id.'
in result.error_messages["invalid_input_settings"]
)
assert all(str(tool_script_path) == item.location for item in result._errors)
with pytest.raises(ToolValidationError):
_client.tools.validate(TOOL_ROOT / "invalid_tool.py", raise_error=True)
def test_validate_tool_func(self):
def load_module_by_path(source):
module_name = Path(source).stem
spec = importlib.util.spec_from_file_location(module_name, source)
module = importlib.util.module_from_spec(spec)
# Execute the module's code
spec.loader.exec_module(module)
return module
tool_script_path = TOOL_ROOT / "custom_llm_tool.py"
module = load_module_by_path(tool_script_path)
tool_func = getattr(module, "my_tool")
result = _client.tools.validate(tool_func)
assert result.passed
tool_script_path = TOOL_ROOT / "invalid_tool.py"
module = load_module_by_path(tool_script_path)
tool_func = getattr(module, "invalid_schema_type")
result = _client.tools.validate(tool_func)
assert "invalid_schema_type" in result.error_messages
assert "1 is not of type 'string'" in result.error_messages["invalid_schema_type"]
assert "invalid_schema_type" == result._errors[0].function_name
assert str(tool_script_path) == result._errors[0].location
with pytest.raises(ToolValidationError):
_client.tools.validate(tool_func, raise_error=True)
def test_validate_package_tool(self):
package_tool_path = TOOL_ROOT / "tool_package"
sys.path.append(str(package_tool_path.resolve()))
import tool_package
with patch("promptflow._sdk.operations._tool_operations.ToolOperations._is_package_tool", return_value=True):
result = _client.tools.validate(tool_package)
assert len(result._errors) == 4
assert "1 is not of type 'string'" in result.error_messages["invalid_schema_type"]
assert (
"Cannot provide both `icon` and `icon_light` or `icon_dark`." in result.error_messages["invalid_tool_icon"]
)
assert (
'Cannot find the input "invalid_input" for the enabled_by of teacher_id.'
in result.error_messages["invalid_input_settings"]
)
assert (
'Cannot find the input "invalid_input" for the enabled_by of student_id.'
in result.error_messages["invalid_input_settings"]
)
def test_validate_tool_class(self):
from promptflow.tools.serpapi import SerpAPI
result = _client.tools.validate(SerpAPI)
assert result.passed
class InvalidToolClass(ToolProvider):
def __init__(self):
super().__init__()
@tool(name="My Custom Tool")
def tool_func(self, api: str):
pass
@tool(name=1)
def invalid_tool_func(self, api: str):
pass
result = _client.tools.validate(InvalidToolClass)
assert not result.passed
assert result._kwargs["total_count"] == 2
assert result._kwargs["invalid_count"] == 1
assert len(result._errors) == 1
assert "1 is not of type 'string'" in result._errors[0].message
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_experiment.py | from pathlib import Path
import pytest
from ruamel.yaml import YAML
from promptflow import PFClient
from promptflow._sdk._constants import ExperimentStatus, RunStatus
from promptflow._sdk._load_functions import load_common
from promptflow._sdk.entities._experiment import (
Experiment,
ExperimentData,
ExperimentInput,
ExperimentTemplate,
FlowNode,
)
TEST_ROOT = Path(__file__).parent.parent.parent
EXP_ROOT = TEST_ROOT / "test_configs/experiments"
FLOW_ROOT = TEST_ROOT / "test_configs/flows"
yaml = YAML(typ="safe")
@pytest.mark.e2etest
@pytest.mark.usefixtures("setup_experiment_table")
class TestExperiment:
def test_experiment_from_template(self):
template_path = EXP_ROOT / "basic-no-script-template" / "basic.exp.yaml"
# Load template and create experiment
template = load_common(ExperimentTemplate, source=template_path)
experiment = Experiment.from_template(template)
# Assert experiment parts are resolved
assert len(experiment.nodes) == 2
assert all(isinstance(n, FlowNode) for n in experiment.nodes)
assert len(experiment.data) == 1
assert isinstance(experiment.data[0], ExperimentData)
assert len(experiment.inputs) == 1
assert isinstance(experiment.inputs[0], ExperimentInput)
# Assert type is resolved
assert experiment.inputs[0].default == 1
# Pop schema and resolve path
expected = dict(yaml.load(open(template_path, "r", encoding="utf-8").read()))
expected.pop("$schema")
expected["data"][0]["path"] = (FLOW_ROOT / "web_classification" / "data.jsonl").absolute().as_posix()
expected["nodes"][0]["path"] = (experiment._output_dir / "snapshots" / "main").absolute().as_posix()
expected["nodes"][1]["path"] = (experiment._output_dir / "snapshots" / "eval").absolute().as_posix()
experiment_dict = experiment._to_dict()
assert experiment_dict["data"][0].items() == expected["data"][0].items()
assert experiment_dict["nodes"][0].items() == expected["nodes"][0].items()
assert experiment_dict["nodes"][1].items() == expected["nodes"][1].items()
assert experiment_dict.items() >= expected.items()
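# Note: the comparison is field-by-field because Experiment.from_template resolves the
# relative data/node paths from the YAML into absolute ones, so a plain dict equality
# against the raw template would fail.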
def test_experiment_create_and_get(self):
template_path = EXP_ROOT / "basic-no-script-template" / "basic.exp.yaml"
# Load template and create experiment
template = load_common(ExperimentTemplate, source=template_path)
experiment = Experiment.from_template(template)
client = PFClient()
exp = client._experiments.create_or_update(experiment)
assert len(client._experiments.list()) > 0
exp_get = client._experiments.get(name=exp.name)
assert exp_get._to_dict() == exp._to_dict()
@pytest.mark.usefixtures("use_secrets_config_file", "recording_injection", "setup_local_connection")
def test_experiment_start(self):
template_path = EXP_ROOT / "basic-no-script-template" / "basic.exp.yaml"
# Load template and create experiment
template = load_common(ExperimentTemplate, source=template_path)
experiment = Experiment.from_template(template)
client = PFClient()
exp = client._experiments.create_or_update(experiment)
exp = client._experiments.start(exp.name)
assert exp.status == ExperimentStatus.TERMINATED
# Assert main run
assert len(exp.node_runs["main"]) > 0
main_run = client.runs.get(name=exp.node_runs["main"][0]["name"])
assert main_run.status == RunStatus.COMPLETED
assert main_run.variant == "${summarize_text_content.variant_0}"
assert main_run.display_name == "main"
assert len(exp.node_runs["eval"]) > 0
# Assert eval run and metrics
eval_run = client.runs.get(name=exp.node_runs["eval"][0]["name"])
assert eval_run.status == RunStatus.COMPLETED
assert eval_run.display_name == "eval"
metrics = client.runs.get_metrics(name=eval_run.name)
assert "accuracy" in metrics
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_cli.py | import importlib
import importlib.util
import json
import logging
import multiprocessing
import os
import os.path
import shutil
import sys
import tempfile
import uuid
from pathlib import Path
from tempfile import mkdtemp
from typing import Dict, List
from unittest.mock import patch
import mock
import pytest
from promptflow._cli._pf.entry import main
from promptflow._constants import PF_USER_AGENT
from promptflow._core.operation_context import OperationContext
from promptflow._sdk._constants import LOGGER_NAME, SCRUBBED_VALUE, ExperimentStatus
from promptflow._sdk._errors import RunNotFoundError
from promptflow._sdk._utils import ClientUserAgentUtil, setup_user_agent_to_operation_context
from promptflow._sdk.operations._local_storage_operations import LocalStorageOperations
from promptflow._sdk.operations._run_operations import RunOperations
from promptflow._utils.context_utils import _change_working_dir
from promptflow._utils.utils import environment_variable_overwrite, parse_ua_to_dict
from promptflow._utils.yaml_utils import dump_yaml, load_yaml
from promptflow.exceptions import UserErrorException
FLOWS_DIR = "./tests/test_configs/flows"
EXPERIMENT_DIR = "./tests/test_configs/experiments"
RUNS_DIR = "./tests/test_configs/runs"
CONNECTIONS_DIR = "./tests/test_configs/connections"
DATAS_DIR = "./tests/test_configs/datas"
TOOL_ROOT = "./tests/test_configs/tools"
TARGET_URL = "https://www.youtube.com/watch?v=o5ZQyXaAv1g"
# TODO: move this to a shared utility module
def run_pf_command(*args, cwd=None):
"""Run a pf command with the given arguments and working directory.
There have been some unknown issues with using subprocess on CI, so we use this function instead, which also
provides a better debugging experience.
"""
origin_argv, origin_cwd = sys.argv, os.path.abspath(os.curdir)
try:
sys.argv = ["pf"] + list(args)
if cwd:
os.chdir(cwd)
main()
finally:
sys.argv = origin_argv
os.chdir(origin_cwd)
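# Usage sketch (hypothetical flow path): arguments are passed positionally, so
#     run_pf_command("flow", "test", "--flow", "my_flow", cwd="/tmp/work")
# behaves like running `pf flow test --flow my_flow` from /tmp/work, with sys.argv and
# the working directory restored afterwards even if main() raises.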
def run_batch(local_client, line_timeout_seconds, timeout_index=None):
os.environ["PF_LINE_TIMEOUT_SEC"] = line_timeout_seconds
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/simple_flow_with_ten_inputs",
"--data",
f"{FLOWS_DIR}/simple_flow_with_ten_inputs/data.jsonl",
"--name",
run_id,
)
run = local_client.runs.get(name=run_id)
local_storage = LocalStorageOperations(run)
detail = local_storage.load_detail()
flow_runs_list = detail["flow_runs"]
for i, flow_run in enumerate(flow_runs_list):
if i == timeout_index:
assert flow_run["status"] == "Failed"
assert flow_run["error"]["message"] == f"Line {i} execution timeout for exceeding 54 seconds"
assert flow_run["error"]["code"] == "UserError"
assert flow_run["error"]["innerError"]["code"] == "LineExecutionTimeoutError"
else:
assert flow_run["status"] == "Completed"
os.environ.pop("PF_LINE_TIMEOUT_SEC")
@pytest.mark.usefixtures(
"use_secrets_config_file", "recording_injection", "setup_local_connection", "install_custom_tool_pkg"
)
@pytest.mark.cli_test
@pytest.mark.e2etest
class TestCli:
def test_pf_version(self, capfd):
run_pf_command("--version")
out, _ = capfd.readouterr()
assert "0.0.1\n" in out
def test_basic_flow_run(self, capfd) -> None:
# fetch stdout
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--name",
str(uuid.uuid4()),
)
out, _ = capfd.readouterr()
assert "Completed" in out
def test_basic_flow_run_batch_and_eval(self, capfd) -> None:
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--name",
run_id,
)
out, _ = capfd.readouterr()
assert "Completed" in out
# Check that the CLI works correctly when the parameter is surrounded by quotation marks, as shown below:
# --param "key=value" key="value"
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/classification_accuracy_evaluation",
"--column-mapping",
"'groundtruth=${data.answer}'",
"prediction='${run.outputs.category}'",
"variant_id=${data.variant_id}",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--run",
run_id,
)
out, _ = capfd.readouterr()
assert "Completed" in out
def test_submit_run_with_yaml(self, capfd):
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--file",
"./sample_bulk_run.yaml",
"--name",
run_id,
cwd=f"{RUNS_DIR}",
)
out, _ = capfd.readouterr()
assert "Completed" in out
run_pf_command(
"run",
"create",
"--file",
"./sample_eval_run.yaml",
"--run",
run_id,
cwd=f"{RUNS_DIR}",
)
out, _ = capfd.readouterr()
assert "Completed" in out
def test_submit_batch_variant(self, local_client):
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--name",
run_id,
"--variant",
"${summarize_text_content.variant_0}",
)
run = local_client.runs.get(name=run_id)
local_storage = LocalStorageOperations(run)
detail = local_storage.load_detail()
tuning_node = next((x for x in detail["node_runs"] if x["node"] == "summarize_text_content"), None)
# variant_0 config was used; the default uses variant_1
assert tuning_node["inputs"]["temperature"] == 0.2
def test_environment_variable_overwrite(self, local_client, local_aoai_connection):
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--name",
run_id,
"--flow",
f"{FLOWS_DIR}/print_env_var",
"--data",
f"{DATAS_DIR}/env_var_names.jsonl",
"--environment-variables",
"API_BASE=${azure_open_ai_connection.api_base}",
)
outputs = local_client.runs._get_outputs(run=run_id)
assert outputs["output"][0] == local_aoai_connection.api_base
def test_connection_overwrite(self, local_alt_aoai_connection, capfd):
# The CLI command will fail with SystemExit
with pytest.raises(SystemExit):
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--connection",
"classify_with_llm.connection=not_exist",
)
out, _ = capfd.readouterr()
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--connection",
"classify_with_llm.connection=new_ai_connection",
)
out, _ = capfd.readouterr()
assert "Completed" in out
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--connection",
"classify_with_llm.model=new_model",
)
out, _ = capfd.readouterr()
assert "Completed" in out
def test_create_with_set(self, local_client):
run_id = str(uuid.uuid4())
display_name = "test_run"
description = "test description"
run_pf_command(
"run",
"create",
"--name",
run_id,
"--flow",
f"{FLOWS_DIR}/print_env_var",
"--data",
f"{DATAS_DIR}/env_var_names.jsonl",
"--environment-variables",
"API_BASE=${azure_open_ai_connection.api_base}",
"--set",
f"display_name={display_name}",
"tags.key=val",
f"description={description}",
)
run = local_client.runs.get(run_id)
assert display_name in run.display_name
assert run.tags == {"key": "val"}
assert run.description == description
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--file",
"./sample_bulk_run.yaml",
"--name",
run_id,
"--set",
f"display_name={display_name}",
"tags.key=val",
f"description={description}",
cwd=f"{RUNS_DIR}",
)
run = local_client.runs.get(run_id)
assert display_name in run.display_name
assert run.tags == {"key": "val"}
assert run.description == description
def test_pf_flow_test(self):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / "flow.output.json"
assert output_path.exists()
log_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / "flow.log"
with open(log_path, "r") as f:
previous_log_content = f.read()
# Test without input
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / "flow.output.json"
assert output_path.exists()
log_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / "flow.log"
with open(log_path, "r") as f:
log_content = f.read()
assert previous_log_content not in log_content
def test_pf_flow_test_with_non_english_input_output(self, capsys):
question = "什么是 chat gpt"
run_pf_command("flow", "test", "--flow", f"{FLOWS_DIR}/chat_flow", "--inputs", f'question="{question}"')
stdout, _ = capsys.readouterr()
output_path = Path(FLOWS_DIR) / "chat_flow" / ".promptflow" / "flow.output.json"
assert output_path.exists()
with open(output_path, "r", encoding="utf-8") as f:
outputs = json.load(f)
assert outputs["answer"] in json.loads(stdout)["answer"]
detail_path = Path(FLOWS_DIR) / "chat_flow" / ".promptflow" / "flow.detail.json"
assert detail_path.exists()
with open(detail_path, "r", encoding="utf-8") as f:
detail = json.load(f)
assert detail["flow_runs"][0]["inputs"]["question"] == question
assert detail["flow_runs"][0]["output"]["answer"] == outputs["answer"]
def test_pf_flow_with_variant(self, capsys):
with tempfile.TemporaryDirectory() as temp_dir:
shutil.copytree((Path(FLOWS_DIR) / "web_classification").resolve().as_posix(), temp_dir, dirs_exist_ok=True)
dag = Path(temp_dir) / "flow.dag.yaml"
flow_dict = load_yaml(dag)
node_name = "summarize_text_content"
node = next(filter(lambda item: item["name"] == node_name, flow_dict["nodes"]))
flow_dict["nodes"].remove(node)
flow_dict["nodes"].append({"name": node_name, "use_variants": True})
with open(Path(temp_dir) / "flow.dag.yaml", "w") as f:
dump_yaml(flow_dict, f)
run_pf_command(
"flow",
"test",
"--flow",
temp_dir,
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
)
output_path = Path(temp_dir) / ".promptflow" / "flow.output.json"
assert output_path.exists()
run_pf_command(
"flow",
"test",
"--flow",
temp_dir,
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
"--variant",
"'${summarize_text_content.variant_1}'",
)
output_path = Path(temp_dir) / ".promptflow" / "flow-summarize_text_content-variant_1.output.json"
assert output_path.exists()
# Test flow dag with invalid format
node_name = flow_dict["nodes"][0]["name"]
flow_dict["nodes"][0]["use_variants"] = True
flow_dict["node_variants"][node_name] = {
"default_variant_id": "invalid_variant",
"variants": [{"variant_0": {}}],
}
with open(Path(temp_dir) / "flow.dag.yaml", "w") as f:
dump_yaml(flow_dict, f)
with pytest.raises(SystemExit):
run_pf_command(
"flow",
"test",
"--flow",
temp_dir,
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
"--variant",
"${summarize_text_content.variant_1}",
)
outerr = capsys.readouterr()
assert f"Cannot find the variant invalid_variant for {node_name}." in outerr.out
def test_pf_flow_test_single_node(self):
node_name = "fetch_text_content_from_url"
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
"inputs.url="
"https://www.microsoft.com/en-us/d/xbox-wireless-controller-stellar-shift-special-edition/94fbjc7h0h6h",
"--node",
node_name,
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / f"flow-{node_name}.node.detail.json"
assert output_path.exists()
node_name = "fetch_text_content_from_url"
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
"url="
"https://www.microsoft.com/en-us/d/xbox-wireless-controller-stellar-shift-special-edition/94fbjc7h0h6h",
"--node",
node_name,
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / f"flow-{node_name}.node.detail.json"
assert output_path.exists()
# Test node with node reference input
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
'input_str={"category": "App", "evidence": "URL"}',
"--node",
"convert_to_dict",
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / "flow-convert_to_dict.node.detail.json"
assert output_path.exists()
# Test without input
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--node",
node_name,
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / f"flow-{node_name}.node.detail.json"
assert output_path.exists()
# Test with input file
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--node",
node_name,
"--input",
f"{FLOWS_DIR}/web_classification/{node_name}_input.jsonl",
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / f"flow-{node_name}.node.detail.json"
assert output_path.exists()
# Test with input file passed via --inputs
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--node",
node_name,
"--inputs",
f"{FLOWS_DIR}/web_classification/{node_name}_input.jsonl",
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / f"flow-{node_name}.node.detail.json"
assert output_path.exists()
def test_pf_flow_test_debug_single_node(self):
node_name = "fetch_text_content_from_url"
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
"inputs.url="
"https://www.microsoft.com/en-us/d/xbox-wireless-controller-stellar-shift-special-edition/94fbjc7h0h6h",
"--node",
node_name,
"--debug",
)
# Debug node with node reference input
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
'classify_with_llm.output={"category": "App", "evidence": "URL"}',
"--node",
"convert_to_dict",
"--debug",
)
def test_pf_flow_test_with_additional_includes(self):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification_with_additional_include",
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
)
output_path = (
Path(FLOWS_DIR) / "web_classification_with_additional_include" / ".promptflow" / "flow.output.json"
)
assert output_path.exists()
node_name = "fetch_text_content_from_url"
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification_with_additional_include",
"--inputs",
"inputs.url="
"https://www.microsoft.com/en-us/d/xbox-wireless-controller-stellar-shift-special-edition/94fbjc7h0h6h",
"--node",
node_name,
)
def test_pf_flow_test_with_symbolic(self, prepare_symbolic_flow):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification_with_symbolic",
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
)
output_path = Path(FLOWS_DIR) / "web_classification_with_symbolic" / ".promptflow" / "flow.output.json"
assert output_path.exists()
node_name = "fetch_text_content_from_url"
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification_with_symbolic",
"--inputs",
"inputs.url="
"https://www.microsoft.com/en-us/d/xbox-wireless-controller-stellar-shift-special-edition/94fbjc7h0h6h",
"--node",
node_name,
)
@pytest.mark.parametrize(
"flow_folder_name, env_key, except_value",
[
pytest.param(
"print_env_var",
"API_BASE",
"${azure_open_ai_connection.api_base}",
id="TestFlowWithEnvironmentVariables",
),
pytest.param(
"flow_with_environment_variables",
"env1",
"2",
id="LoadEnvVariablesWithoutOverridesInYaml",
),
],
)
def test_flow_test_with_environment_variable(self, flow_folder_name, env_key, except_value, local_client):
from promptflow._sdk._submitter.utils import SubmitterHelper
def validate_stdout(detail_path):
with open(detail_path, "r") as f:
details = json.load(f)
assert details["node_runs"][0]["logs"]["stdout"]
env = {env_key: except_value}
SubmitterHelper.resolve_environment_variables(env, local_client)
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/{flow_folder_name}",
"--inputs",
f"key={env_key}",
"--environment-variables",
"API_BASE=${azure_open_ai_connection.api_base}",
)
with open(Path(FLOWS_DIR) / flow_folder_name / ".promptflow" / "flow.output.json", "r") as f:
outputs = json.load(f)
assert outputs["output"] == env[env_key]
validate_stdout(Path(FLOWS_DIR) / flow_folder_name / ".promptflow" / "flow.detail.json")
# Test log contains user printed outputs
log_path = Path(FLOWS_DIR) / flow_folder_name / ".promptflow" / "flow.log"
with open(log_path, "r") as f:
log_content = f.read()
assert env[env_key] in log_content
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/{flow_folder_name}",
"--inputs",
f"inputs.key={env_key}",
"--environment-variables",
"API_BASE=${azure_open_ai_connection.api_base}",
"--node",
"print_env",
)
with open(Path(FLOWS_DIR) / flow_folder_name / ".promptflow" / "flow-print_env.node.output.json", "r") as f:
outputs = json.load(f)
assert outputs["value"] == env[env_key]
validate_stdout(Path(FLOWS_DIR) / flow_folder_name / ".promptflow" / "flow-print_env.node.detail.json")
def _validate_requirement(self, flow_path):
with open(flow_path) as f:
flow_dict = load_yaml(f)
assert flow_dict.get("environment", {}).get("python_requirements_txt", None)
assert (flow_path.parent / flow_dict["environment"]["python_requirements_txt"]).exists()
def test_flow_with_exception(self, capsys):
with pytest.raises(SystemExit):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification_with_exception",
)
captured = capsys.readouterr()
assert "Execution failure in 'convert_to_dict': (Exception) mock exception" in captured.out
output_path = Path(FLOWS_DIR) / "web_classification_with_exception" / ".promptflow" / "flow.detail.json"
assert output_path.exists()
with pytest.raises(SystemExit):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification_with_exception",
"--inputs",
'classify_with_llm.output={"category": "App", "evidence": "URL"}',
"--node",
"convert_to_dict",
)
captured = capsys.readouterr()
assert "convert_to_dict.py" in captured.out
assert "mock exception" in captured.out
output_path = (
Path(FLOWS_DIR)
/ "web_classification_with_exception"
/ ".promptflow"
/ "flow-convert_to_dict.node.detail.json"
)
assert output_path.exists()
def test_init_eval_flow(self):
temp_dir = mkdtemp()
with _change_working_dir(temp_dir):
flow_name = "eval_flow"
# Init standard flow
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"evaluation",
)
ignore_file_path = Path(temp_dir) / flow_name / ".gitignore"
assert ignore_file_path.exists()
ignore_file_path.unlink()
# TODO remove variant_id & line_number in evaluate template
run_pf_command("flow", "test", "--flow", flow_name, "--inputs", "groundtruth=App", "prediction=App")
self._validate_requirement(Path(temp_dir) / flow_name / "flow.dag.yaml")
def test_init_chat_flow(self):
temp_dir = mkdtemp()
with _change_working_dir(temp_dir):
flow_name = "chat_flow"
# Init standard flow
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"chat",
)
ignore_file_path = Path(temp_dir) / flow_name / ".gitignore"
assert ignore_file_path.exists()
ignore_file_path.unlink()
# Only the azure openai connection exists in the test env
with open(Path(temp_dir) / flow_name / "flow.dag.yaml", "r") as f:
flow_dict = load_yaml(f)
flow_dict["nodes"][0]["provider"] = "AzureOpenAI"
flow_dict["nodes"][0]["connection"] = "azure_open_ai_connection"
with open(Path(temp_dir) / flow_name / "flow.dag.yaml", "w") as f:
dump_yaml(flow_dict, f)
run_pf_command("flow", "test", "--flow", flow_name, "--inputs", "question=hi")
self._validate_requirement(Path(temp_dir) / flow_name / "flow.dag.yaml")
def test_flow_init(self, capsys):
temp_dir = mkdtemp()
with _change_working_dir(temp_dir):
flow_name = "standard_flow"
# Init standard flow
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"standard",
)
self._validate_requirement(Path(temp_dir) / flow_name / "flow.dag.yaml")
ignore_file_path = Path(temp_dir) / flow_name / ".gitignore"
requirements_file_path = Path(temp_dir) / flow_name / "requirements.txt"
assert ignore_file_path.exists()
assert requirements_file_path.exists()
ignore_file_path.unlink()
run_pf_command("flow", "test", "--flow", flow_name, "--inputs", "text=value")
jinja_name = "input1"
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--entry",
"hello.py",
"--function",
"my_python_tool",
"--prompt-template",
f"{jinja_name}=hello.jinja2",
)
self._validate_requirement(Path(temp_dir) / flow_name / "flow.dag.yaml")
assert ignore_file_path.exists()
assert requirements_file_path.exists()
with open(Path(temp_dir) / flow_name / ".promptflow" / "flow.tools.json", "r") as f:
tools_dict = json.load(f)["code"]
assert jinja_name in tools_dict
assert len(tools_dict[jinja_name]["inputs"]) == 1
assert tools_dict[jinja_name]["inputs"]["text"]["type"] == ["string"]
assert tools_dict[jinja_name]["source"] == "hello.jinja2"
# Test when the prompt-template file doesn't exist
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--entry",
"hello.py",
"--function",
"my_python_tool",
"--prompt-template",
f"{jinja_name}={jinja_name}.jinja2",
)
self._validate_requirement(Path(temp_dir) / flow_name / "flow.dag.yaml")
assert (Path(temp_dir) / flow_name / f"{jinja_name}.jinja2").exists()
# Test when the template name doesn't exist in the python function arguments
jinja_name = "mock_jinja"
with pytest.raises(UserErrorException) as ex:
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--entry",
"hello.py",
"--function",
"my_python_tool",
"--prompt-template",
f"{jinja_name}={jinja_name}.jinja2",
)
assert f"Template parameter {jinja_name} doesn't find in python function arguments." in str(ex.value)
with pytest.raises(SystemExit):
run_pf_command("flow", "init")
_, err = capsys.readouterr()
assert "pf flow init: error: the following arguments are required: --flow" in err
def test_flow_init_intent_copilot(self):
flow_path = os.path.join(FLOWS_DIR, "intent-copilot")
run_pf_command(
"flow",
"init",
"--flow",
flow_path,
"--entry",
"intent.py",
"--function",
"extract_intent",
"--prompt-template",
"chat_prompt=user_intent_zero_shot.jinja2",
)
with open(Path(flow_path) / "flow.dag.yaml", "r") as f:
flow_dict = load_yaml(f)
assert "chat_history" in flow_dict["inputs"]
assert "customer_info" in flow_dict["inputs"]
chat_prompt_node = next(filter(lambda item: item["name"] == "chat_prompt", flow_dict["nodes"]))
assert "chat_history" in chat_prompt_node["inputs"]
assert "customer_info" in chat_prompt_node["inputs"]
def test_flow_init_with_connection_and_deployment(self):
def check_connection_and_deployment(flow_folder, connection, deployment):
with open(Path(flow_folder) / "flow.dag.yaml", "r") as f:
flow_dict = load_yaml(f)
assert flow_dict["nodes"][0]["inputs"]["deployment_name"] == deployment
assert flow_dict["nodes"][0]["connection"] == connection
temp_dir = mkdtemp()
with _change_working_dir(temp_dir):
flow_name = "chat_flow"
flow_folder = Path(temp_dir) / flow_name
# When the local connection provider is configured, init the chat flow without connection and deployment.
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"chat",
)
# Assert connection files created
assert (flow_folder / "azure_openai.yaml").exists()
assert (flow_folder / "openai.yaml").exists()
# When the local connection provider is configured, init the chat flow with connection and deployment.
connection = "connection_name"
deployment = "deployment_name"
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"chat",
"--connection",
connection,
"--deployment",
deployment,
"--yes",
)
# Assert connection files created and the connection/deployment is set in flow.dag.yaml
check_connection_and_deployment(flow_folder, connection=connection, deployment=deployment)
connection_files = [flow_folder / "azure_openai.yaml", flow_folder / "openai.yaml"]
for file in connection_files:
assert file.exists()
with open(file, "r") as f:
connection_dict = load_yaml(f)
assert connection_dict["name"] == connection
shutil.rmtree(flow_folder)
target = "promptflow._sdk._pf_client.Configuration.get_connection_provider"
with mock.patch(target) as mocked:
mocked.return_value = "azureml:xx"
# When the azure connection provider is configured, init the chat flow without connection and deployment.
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"chat",
"--yes",
)
# Assert connection files not created.
assert not (flow_folder / "azure_openai.yaml").exists()
assert not (flow_folder / "openai.yaml").exists()
# When the azure connection provider is configured, init the chat flow with connection and deployment.
connection = "connection_name"
deployment = "deployment_name"
run_pf_command(
"flow",
"init",
"--flow",
flow_name,
"--type",
"chat",
"--connection",
connection,
"--deployment",
deployment,
"--yes",
)
# Assert connection files not created and the connection/deployment is set in flow.dag.yaml
check_connection_and_deployment(flow_folder, connection=connection, deployment=deployment)
assert not (flow_folder / "azure_openai.yaml").exists()
assert not (flow_folder / "openai.yaml").exists()
def test_flow_chat(self, monkeypatch, capsys):
chat_list = ["hi", "what is chat gpt?"]
def mock_input(*args, **kwargs):
if chat_list:
return chat_list.pop()
else:
raise KeyboardInterrupt()
monkeypatch.setattr("builtins.input", mock_input)
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow",
"--interactive",
)
output_path = Path(FLOWS_DIR) / "chat_flow" / ".promptflow" / "chat.output.json"
assert output_path.exists()
detail_path = Path(FLOWS_DIR) / "chat_flow" / ".promptflow" / "chat.detail.json"
assert detail_path.exists()
# Test streaming output
chat_list = ["hi", "what is chat gpt?"]
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_with_stream_output",
"--interactive",
)
output_path = Path(FLOWS_DIR) / "chat_flow_with_stream_output" / ".promptflow" / "chat.output.json"
assert output_path.exists()
detail_path = Path(FLOWS_DIR) / "chat_flow_with_stream_output" / ".promptflow" / "chat.detail.json"
assert detail_path.exists()
chat_list = ["hi", "what is chat gpt?"]
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_with_python_node_streaming_output",
"--interactive",
)
output_path = Path(FLOWS_DIR) / "chat_flow_with_stream_output" / ".promptflow" / "chat.output.json"
assert output_path.exists()
detail_path = Path(FLOWS_DIR) / "chat_flow_with_stream_output" / ".promptflow" / "chat.detail.json"
assert detail_path.exists()
# Validate terminal output
chat_list = ["hi", "what is chat gpt?"]
run_pf_command("flow", "test", "--flow", f"{FLOWS_DIR}/chat_flow", "--interactive", "--verbose")
outerr = capsys.readouterr()
# Check node output
assert "chat_node:" in outerr.out
assert "show_answer:" in outerr.out
assert "[show_answer]: print:" in outerr.out
chat_list = ["hi", "what is chat gpt?"]
with pytest.raises(SystemExit):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_with_exception",
"--interactive",
)
outerr = capsys.readouterr()
assert "Execution failure in 'show_answer': (Exception) mock exception" in outerr.out
output_path = Path(FLOWS_DIR) / "chat_flow" / ".promptflow" / "chat.output.json"
assert output_path.exists()
detail_path = Path(FLOWS_DIR) / "chat_flow" / ".promptflow" / "chat.detail.json"
assert detail_path.exists()
with pytest.raises(SystemExit):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_with_multi_output_invalid",
"--interactive",
)
outerr = capsys.readouterr()
assert "chat flow does not support multiple chat outputs" in outerr.out
def test_flow_test_with_default_chat_history(self):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_with_default_history",
)
output_path = Path(FLOWS_DIR) / "chat_flow_with_default_history" / ".promptflow" / "flow.output.json"
assert output_path.exists()
detail_path = Path(FLOWS_DIR) / "chat_flow_with_default_history" / ".promptflow" / "flow.detail.json"
assert detail_path.exists()
with open(detail_path, "r") as f:
details = json.load(f)
expect_chat_history = [
{"inputs": {"question": "hi"}, "outputs": {"answer": "hi"}},
{"inputs": {"question": "who are you"}, "outputs": {"answer": "who are you"}},
]
assert details["flow_runs"][0]["inputs"]["chat_history"] == expect_chat_history
def test_flow_test_with_user_defined_chat_history(self, monkeypatch, capsys):
chat_list = ["hi", "what is chat gpt?"]
def mock_input(*args, **kwargs):
if chat_list:
return chat_list.pop()
else:
raise KeyboardInterrupt()
monkeypatch.setattr("builtins.input", mock_input)
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_with_defined_chat_history",
"--interactive",
)
output_path = Path(FLOWS_DIR) / "chat_flow_with_defined_chat_history" / ".promptflow" / "chat.output.json"
assert output_path.exists()
detail_path = Path(FLOWS_DIR) / "chat_flow_with_defined_chat_history" / ".promptflow" / "chat.detail.json"
assert detail_path.exists()
# Test when is_chat_history is set to False
with pytest.raises(SystemExit):
chat_list = ["hi", "what is chat gpt?"]
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/chat_flow_without_defined_chat_history",
"--interactive",
)
outerr = capsys.readouterr()
assert "chat_history is required in the inputs of chat flow" in outerr.out
@pytest.mark.parametrize(
"extra_args,expected_err",
[
pytest.param(
[],
"Required input(s) ['key'] are missing for \"flow\".",
id="missing_required_flow_inputs",
),
pytest.param(
["--node", "print_env"],
"Required input(s) ['key'] are missing for \"print_env\".",
id="missing_required_node_inputs",
),
],
)
def test_flow_test_inputs_missing(self, capsys, caplog, extra_args: List[str], expected_err: str):
with pytest.raises(SystemExit):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/print_env_var",
"--environment-variables",
"API_BASE=${azure_open_ai_connection.api_base}",
*extra_args,
)
stdout, _ = capsys.readouterr()
assert expected_err in stdout
@pytest.mark.parametrize(
"extra_args,expected_inputs,expected_log_prefixes",
[
pytest.param(
[
"--inputs",
f"url={TARGET_URL}",
"answer=Channel",
"evidence=Url",
],
[
{"answer": "Channel", "evidence": "Url"},
{"url": TARGET_URL, "answer": "Channel", "evidence": "Url"},
],
[
"Unknown input(s) of flow: ",
"flow input(s): ",
],
id="unknown_flow_inputs",
),
pytest.param(
[
"--inputs",
f"inputs.url={TARGET_URL}",
"unknown_input=unknown_val",
"--node",
"fetch_text_content_from_url",
],
[
{"unknown_input": "unknown_val"},
{"fetch_url": TARGET_URL, "unknown_input": "unknown_val"},
],
[
"Unknown input(s) of fetch_text_content_from_url: ",
"fetch_text_content_from_url input(s): ",
],
id="unknown_inputs_node",
),
],
)
def test_flow_test_inputs_unknown(
self, caplog, extra_args: List[str], expected_inputs: List[Dict[str, str]], expected_log_prefixes: List[str]
):
logger = logging.getLogger(LOGGER_NAME)
logger.propagate = True
def validate_log(log_msg, prefix, expect_dict):
log_inputs = json.loads(log_msg[len(prefix) :].replace("'", '"'))
assert prefix in log_msg
assert expect_dict == log_inputs
with caplog.at_level(level=logging.INFO, logger=LOGGER_NAME):
run_pf_command("flow", "test", "--flow", f"{FLOWS_DIR}/web_classification", *extra_args)
for log, expected_input, expected_log_prefix in zip(caplog.records, expected_inputs, expected_log_prefixes):
validate_log(
prefix=expected_log_prefix,
log_msg=log.message,
expect_dict=expected_input,
)
def test_flow_build(self):
source = f"{FLOWS_DIR}/web_classification_with_additional_include/flow.dag.yaml"
output_path = "dist"
def get_node_settings(_flow_dag_path: Path):
flow_dag = load_yaml(_flow_dag_path)
target_node = next(filter(lambda x: x["name"] == "summarize_text_content", flow_dag["nodes"]))
target_node.pop("name")
return target_node
try:
run_pf_command(
"flow",
"build",
"--source",
source,
"--output",
output_path,
"--format",
"docker",
"--variant",
"${summarize_text_content.variant_0}",
)
new_flow_dag_path = Path(output_path, "flow", "flow.dag.yaml")
flow_dag = load_yaml(Path(source))
assert (
get_node_settings(new_flow_dag_path)
== flow_dag["node_variants"]["summarize_text_content"]["variants"]["variant_0"]["node"]
)
assert get_node_settings(Path(source)) != get_node_settings(new_flow_dag_path)
connection_path = Path(output_path, "connections", "azure_open_ai_connection.yaml")
assert connection_path.exists()
finally:
shutil.rmtree(output_path, ignore_errors=True)
def test_flow_build_with_ua(self):
with pytest.raises(UserErrorException) as e:
run_pf_command(
"flow",
"build",
"--source",
"not_exist",
"--output",
"dist",
"--format",
"docker",
"--user-agent",
"test/1.0.0",
)
assert "not exist" in str(e.value)
@pytest.mark.parametrize(
"file_name, expected, update_item",
[
(
"azure_openai_connection.yaml",
{
"module": "promptflow.connections",
"type": "azure_open_ai",
"api_type": "azure",
"api_version": "2023-07-01-preview",
"api_key": SCRUBBED_VALUE,
"api_base": "aoai-api-endpoint",
},
("api_base", "new_value"),
),
(
"custom_connection.yaml",
{
"module": "promptflow.connections",
"type": "custom",
"configs": {"key1": "test1"},
"secrets": {"key2": SCRUBBED_VALUE},
},
("configs.key1", "new_value"),
),
(
"custom_strong_type_connection.yaml",
{
"module": "promptflow.connections",
"type": "custom",
"configs": {
"api_base": "This is my first connection.",
"promptflow.connection.custom_type": "MyFirstConnection",
"promptflow.connection.module": "my_tool_package.connections",
"promptflow.connection.package": "test-custom-tools",
"promptflow.connection.package_version": "0.0.2",
},
"secrets": {"api_key": SCRUBBED_VALUE},
},
("configs.api_base", "new_value"),
),
],
)
def test_connection_create_update(
self, install_custom_tool_pkg, file_name, expected, update_item, capfd, local_client
):
name = f"Connection_{str(uuid.uuid4())[:4]}"
run_pf_command("connection", "create", "--file", f"{CONNECTIONS_DIR}/{file_name}", "--name", f"{name}")
out, err = capfd.readouterr()
# Assert in to skip some datetime fields
assert expected.items() <= json.loads(out).items()
# Update with --set
update_key, update_value = update_item
run_pf_command("connection", "update", "--set", f"{update_key}={update_value}", "--name", f"{name}")
out, _ = capfd.readouterr()
assert update_value in out, f"expected updated value {update_value} not in {out}"
connection = local_client.connections.get(name)
# Assert secrets are not scrubbed
assert not any(v == SCRUBBED_VALUE for v in connection._secrets.values())
def test_input_with_dict_val(self, pf):
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--file",
"./input_with_dict_val.yaml",
"--name",
run_id,
cwd=f"{RUNS_DIR}",
)
outputs = pf.runs._get_outputs(run=run_id)
assert "dict" in outputs["output"][0]
def test_visualize_ignore_space(self) -> None:
names = ["a,b,c,d", "a, b, c, d", "a, b , c, d"]
groundtruth = ["a", "b", "c", "d"]
def mocked_visualize(*args, **kwargs):
runs = args[0]
assert runs == groundtruth
with patch.object(RunOperations, "visualize") as mock_visualize:
mock_visualize.side_effect = mocked_visualize
for name in names:
run_pf_command(
"run",
"visualize",
"--names",
name,
)
def test_pf_run_with_stream_log(self, capfd):
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/flow_with_user_output",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--column-mapping",
"key=value",
"extra=${data.url}",
"--stream",
)
out, _ = capfd.readouterr()
# For a batch run, the executor uses the bulk logger to print logs, and only prints the error logs of the nodes.
existing_keywords = ["execution", "execution.bulk", "WARNING", "error log"]
non_existing_keywords = ["execution.flow", "user log"]
for keyword in existing_keywords:
assert keyword in out
for keyword in non_existing_keywords:
assert keyword not in out
def test_pf_run_no_stream_log(self, capfd):
# without --stream, logs will be in the run's log file
run_pf_command(
"run",
"create",
"--flow",
f"{FLOWS_DIR}/flow_with_user_output",
"--data",
f"{DATAS_DIR}/webClassification3.jsonl",
"--column-mapping",
"key=value",
"extra=${data.url}",
)
out, _ = capfd.readouterr()
assert "user log" not in out
assert "error log" not in out
# flow logs won't stream
assert "Executing node print_val. node run id:" not in out
# executor logs won't stream
assert "Node print_val completes." not in out
def test_format_cli_exception(self, capsys):
from promptflow._sdk.operations._connection_operations import ConnectionOperations
with patch.dict(os.environ, {"PROMPTFLOW_STRUCTURE_EXCEPTION_OUTPUT": "true"}):
with pytest.raises(SystemExit):
run_pf_command(
"connection",
"show",
"--name",
"invalid_connection_name",
)
outerr = capsys.readouterr()
assert outerr.err
error_msg = json.loads(outerr.err)
assert error_msg["code"] == "UserError"
assert error_msg["innerError"]["innerError"]["code"] == "ConnectionNotFoundError"
def mocked_connection_get(*args, **kwargs):
raise Exception("mock exception")
with patch.object(ConnectionOperations, "get") as mock_connection_get:
mock_connection_get.side_effect = mocked_connection_get
with pytest.raises(Exception):
run_pf_command(
"connection",
"show",
"--name",
"invalid_connection_name",
)
outerr = capsys.readouterr()
assert outerr.err
error_msg = json.loads(outerr.err)
assert error_msg["code"] == "SystemError"
with pytest.raises(SystemExit):
run_pf_command(
"connection",
"show",
"--name",
"invalid_connection_name",
)
outerr = capsys.readouterr()
assert not outerr.err
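# With PROMPTFLOW_STRUCTURE_EXCEPTION_OUTPUT=true, the CLI emits the error as
# JSON on stderr; a minimal sketch of drilling into the nested error codes
# (field names taken from the assertions above):
#   err = json.loads(outerr.err)
#   err["code"]                              # "UserError" or "SystemError"
#   err["innerError"]["innerError"]["code"]  # e.g. "ConnectionNotFoundError"
# Without the flag, stderr stays empty here, per the last assertion.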
def test_tool_init(self, capsys):
with tempfile.TemporaryDirectory() as temp_dir:
package_name = "package_name"
func_name = "func_name"
run_pf_command("tool", "init", "--package", package_name, "--tool", func_name, cwd=temp_dir)
package_folder = Path(temp_dir) / package_name
sys.path.append(str(package_folder.absolute()))
assert (package_folder / package_name / f"{func_name}.py").exists()
assert (package_folder / package_name / "utils.py").exists()
assert (package_folder / package_name / "__init__.py").exists()
assert (package_folder / "setup.py").exists()
assert (package_folder / "README.md").exists()
spec = importlib.util.spec_from_file_location(
f"{package_name}.utils", package_folder / package_name / "utils.py"
)
utils = importlib.util.module_from_spec(spec)
spec.loader.exec_module(utils)
assert hasattr(utils, "list_package_tools")
tools_meta = utils.list_package_tools()
assert f"{package_name}.{func_name}.{func_name}" in tools_meta
meta = tools_meta[f"{package_name}.{func_name}.{func_name}"]
assert meta["function"] == func_name
assert meta["module"] == f"{package_name}.{func_name}"
assert meta["name"] == func_name
assert meta["description"] == f"This is {func_name} tool"
assert meta["type"] == "python"
# Invalid package/tool name
invalid_package_name = "123-package-name"
invalid_tool_name = "123_tool_name"
with pytest.raises(SystemExit):
run_pf_command("tool", "init", "--package", invalid_package_name, "--tool", func_name, cwd=temp_dir)
outerr = capsys.readouterr()
assert f"The package name {invalid_package_name} is a invalid identifier." in outerr.out
with pytest.raises(SystemExit):
run_pf_command("tool", "init", "--package", package_name, "--tool", invalid_tool_name, cwd=temp_dir)
outerr = capsys.readouterr()
assert f"The tool name {invalid_tool_name} is a invalid identifier." in outerr.out
with pytest.raises(SystemExit):
run_pf_command("tool", "init", "--tool", invalid_tool_name, cwd=temp_dir)
outerr = capsys.readouterr()
assert f"The tool name {invalid_tool_name} is a invalid identifier." in outerr.out
# Test init package tool with extra info
package_name = "tool_with_extra_info"
package_folder = Path(temp_dir) / package_name
package_folder.mkdir(exist_ok=True, parents=True)
manifest_file = package_folder / "MANIFEST.in"
mock_manifest_content = "include mock/path"
with open(manifest_file, "w") as f:
f.write(mock_manifest_content)
icon_path = Path(DATAS_DIR) / "logo.jpg"
category = "test_category"
tags = {"tag1": "value1", "tag2": "value2"}
run_pf_command(
"tool",
"init",
"--package",
package_name,
"--tool",
func_name,
"--set",
f"icon={icon_path.absolute()}",
f"category={category}",
f"tags={tags}",
cwd=temp_dir,
)
with open(manifest_file, "r") as f:
content = f.read()
assert mock_manifest_content in content
assert f"include {package_name}/icons" in content
# Add a tool script with icon
tool_script_name = "tool_func_with_icon"
run_pf_command(
"tool",
"init",
"--tool",
tool_script_name,
"--set",
f"icon={icon_path.absolute()}",
f"category={category}",
f"tags={tags}",
cwd=Path(temp_dir) / package_name / package_name,
)
sys.path.append(str(package_folder.absolute()))
spec = importlib.util.spec_from_file_location(
f"{package_name}.utils", package_folder / package_name / "utils.py"
)
utils = importlib.util.module_from_spec(spec)
spec.loader.exec_module(utils)
assert hasattr(utils, "list_package_tools")
tools_meta = utils.list_package_tools()
meta = tools_meta[f"{package_name}.{func_name}.{func_name}"]
assert meta["category"] == category
assert meta["tags"] == tags
assert meta["icon"].startswith("data:image")
assert tools_meta[f"{package_name}.{tool_script_name}.{tool_script_name}"]["icon"].startswith("data:image")
# icon doesn't exist
with pytest.raises(SystemExit):
run_pf_command(
"tool",
"init",
"--package",
package_name,
"--tool",
func_name,
"--set",
"icon=invalid_icon_path",
cwd=temp_dir,
)
outerr = capsys.readouterr()
assert "Cannot find the icon path" in outerr.out
def test_tool_list(self, capsys):
# List package tools in environment
run_pf_command("tool", "list")
outerr = capsys.readouterr()
tools_dict = json.loads(outerr.out)
package_tool_name = "promptflow.tools.embedding.embedding"
assert package_tool_name in tools_dict["package"]
# List flow tools and package tools
run_pf_command("tool", "list", "--flow", f"{FLOWS_DIR}/chat_flow")
outerr = capsys.readouterr()
tools_dict = json.loads(outerr.out)
expect_flow_tools = {
"chat.jinja2": {
"type": "llm",
"inputs": {"chat_history": {"type": ["string"]}, "question": {"type": ["string"]}},
"source": "chat.jinja2",
},
"show_answer.py": {
"type": "python",
"inputs": {"chat_answer": {"type": ["string"]}},
"source": "show_answer.py",
"function": "show_answer",
},
}
assert tools_dict["code"] == expect_flow_tools
assert package_tool_name in tools_dict["package"]
# Invalid flow parameter
with pytest.raises(SystemExit):
run_pf_command("tool", "list", "--flow", "invalid_flow_folder")
outerr = capsys.readouterr()
assert "invalid_flow_folder does not exist" in outerr.out
def test_tool_validate(self):
# Test validate tool script
tool_script_path = Path(TOOL_ROOT) / "custom_llm_tool.py"
run_pf_command("tool", "validate", "--source", str(tool_script_path))
invalid_tool_script_path = Path(TOOL_ROOT) / "invalid_tool.py"
with pytest.raises(SystemExit):
run_pf_command("tool", "validate", "--source", str(invalid_tool_script_path))
# Test validate package tool
tool_script_path = Path(TOOL_ROOT) / "tool_package"
sys.path.append(str(tool_script_path.resolve()))
with patch("promptflow._sdk.operations._tool_operations.ToolOperations._is_package_tool", return_value=True):
with pytest.raises(SystemExit):
run_pf_command("tool", "validate", "--source", "tool_package")
# Test validate tool in package
with pytest.raises(SystemExit):
run_pf_command("tool", "validate", "--source", "tool_package.invalid_tool.invalid_input_settings")
def test_flow_test_with_image_input_and_output(self):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/python_tool_with_simple_image",
)
output_path = Path(FLOWS_DIR) / "python_tool_with_simple_image" / ".promptflow" / "output"
assert output_path.exists()
image_path = Path(FLOWS_DIR) / "python_tool_with_simple_image" / ".promptflow" / "intermediate"
assert image_path.exists()
def test_flow_test_with_composite_image(self):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/python_tool_with_composite_image",
)
output_path = Path(FLOWS_DIR) / "python_tool_with_composite_image" / ".promptflow" / "output"
assert output_path.exists()
image_path = Path(FLOWS_DIR) / "python_tool_with_composite_image" / ".promptflow" / "intermediate"
assert image_path.exists()
def test_run_file_with_set(self, pf) -> None:
name = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--file",
f"{RUNS_DIR}/run_with_env.yaml",
"--set",
f"name={name}",
)
# run exists
pf.runs.get(name=name)
def test_run_file_with_set_priority(self, pf) -> None:
# --name has higher priority than --set
name1 = str(uuid.uuid4())
name2 = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--file",
f"{RUNS_DIR}/run_with_env.yaml",
"--set",
f"name={name1}",
"--name",
name2,
)
# --name takes priority over --set: name1 should not exist, name2 should
try:
pf.runs.get(name=name1)
except RunNotFoundError:
pass
pf.runs.get(name=name2)
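# Precedence sketch inferred from the assertions above: an explicit CLI flag
# such as --name overrides the same field supplied via --set, which in turn
# overrides the value in the YAML file.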
def test_data_scrubbing(self):
# Prepare connection
run_pf_command(
"connection", "create", "--file", f"{CONNECTIONS_DIR}/custom_connection.yaml", "--name", "custom_connection"
)
# Test flow run
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/print_secret_flow",
)
output_path = Path(FLOWS_DIR) / "print_secret_flow" / ".promptflow" / "flow.output.json"
assert output_path.exists()
log_path = Path(FLOWS_DIR) / "print_secret_flow" / ".promptflow" / "flow.log"
with open(log_path, "r") as f:
log_content = f.read()
assert "**data_scrubbed**" in log_content
# Test node run
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/print_secret_flow",
"--node",
"print_secret",
"--inputs",
"conn=custom_connection",
"inputs.topic=atom",
)
output_path = Path(FLOWS_DIR) / "print_secret_flow" / ".promptflow" / "flow-print_secret.node.detail.json"
assert output_path.exists()
log_path = Path(FLOWS_DIR) / "print_secret_flow" / ".promptflow" / "print_secret.node.log"
with open(log_path, "r") as f:
log_content = f.read()
assert "**data_scrubbed**" in log_content
def test_cli_ua(self, pf):
# clear user agent before test
context = OperationContext().get_instance()
context.user_agent = ""
with environment_variable_overwrite(PF_USER_AGENT, ""):
with pytest.raises(SystemExit):
run_pf_command(
"run",
"show",
"--name",
"not_exist",
)
user_agent = ClientUserAgentUtil.get_user_agent()
ua_dict = parse_ua_to_dict(user_agent)
assert ua_dict.keys() == {"promptflow-sdk", "promptflow-cli"}
def test_config_set_pure_flow_directory_macro(self, capfd: pytest.CaptureFixture) -> None:
run_pf_command(
"config",
"set",
"run.output_path='${flow_directory}'",
)
out, _ = capfd.readouterr()
expected_error_message = (
"Invalid config value '${flow_directory}' for 'run.output_path': "
"Cannot specify flow directory as run output path; "
"if you want to specify run output path under flow directory, "
"please use its child folder, e.g. '${flow_directory}/.runs'."
)
assert expected_error_message in out
from promptflow._sdk._configuration import Configuration
config = Configuration.get_instance()
assert config.get_run_output_path() is None
def test_user_agent_in_cli(self):
context = OperationContext().get_instance()
context.user_agent = ""
with pytest.raises(SystemExit):
run_pf_command(
"run",
"show",
"--name",
"not_exist",
"--user-agent",
"a/1.0.0 b/2.0",
)
user_agent = ClientUserAgentUtil.get_user_agent()
ua_dict = parse_ua_to_dict(user_agent)
assert ua_dict.keys() == {"promptflow-sdk", "promptflow-cli", "a", "b"}
context.user_agent = ""
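# parse_ua_to_dict presumably splits a user agent like "a/1.0.0 b/2.0" into
# {"a": "1.0.0", "b": "2.0"}; a minimal reimplementation sketch under that
# assumption:
#   def parse_ua_to_dict(ua):
#       return dict(part.split("/", 1) for part in ua.split() if "/" in part)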
def test_node_run_telemetry(self, local_client):
from promptflow._sdk._telemetry.logging_handler import PromptFlowSDKLogHandler
def assert_node_run(*args, **kwargs):
record = args[0]
assert record.msg.startswith("pf.flow.node_test") or record.msg.startswith("pf.flows.node_test")
assert record.custom_dimensions["activity_name"] in ["pf.flow.node_test", "pf.flows.node_test"]
def assert_flow_test(*args, **kwargs):
record = args[0]
assert record.msg.startswith("pf.flow.test") or record.msg.startswith("pf.flows.test")
assert record.custom_dimensions["activity_name"] in ["pf.flow.test", "pf.flows.test"]
with tempfile.TemporaryDirectory() as temp_dir:
shutil.copytree((Path(FLOWS_DIR) / "print_env_var").resolve().as_posix(), temp_dir, dirs_exist_ok=True)
with patch.object(PromptFlowSDKLogHandler, "emit") as mock_logger:
mock_logger.side_effect = assert_node_run
run_pf_command(
"flow",
"test",
"--flow",
temp_dir,
"--inputs",
"key=API_BASE",
"--node",
"print_env",
)
with patch.object(PromptFlowSDKLogHandler, "emit") as mock_logger:
mock_logger.side_effect = assert_flow_test
run_pf_command(
"flow",
"test",
"--flow",
temp_dir,
"--inputs",
"key=API_BASE",
)
def test_run_create_with_existing_run_folder(self):
run_name = "web_classification_variant_0_20231205_120253_104100"
# clean up the run if it exists
from promptflow import PFClient
from promptflow._cli._utils import _try_delete_existing_run_record
pf = PFClient()
_try_delete_existing_run_record(run_name)
# assert the run doesn't exist
with pytest.raises(RunNotFoundError):
pf.runs.get(run_name)
uuid_str = str(uuid.uuid4())
run_folder = Path(RUNS_DIR) / run_name
run_pf_command(
"run",
"create",
"--source",
Path(run_folder).resolve().as_posix(),
"--set",
f"display_name={uuid_str}",
f"description={uuid_str}",
f"tags.tag1={uuid_str}",
)
# check run results
run = pf.runs.get(run_name)
assert run.display_name == uuid_str
assert run.description == uuid_str
assert run.tags["tag1"] == uuid_str
def test_cli_command_no_sub_command(self, capfd):
try:
run_pf_command(
"run",
)
# argparse raises SystemExit after printing help
except SystemExit:
pass
# bare "pf run" prints the same help as "pf run -h"
out, _ = capfd.readouterr()
assert "A CLI tool to manage runs for prompt flow." in out
try:
run_pf_command("run", "-h")
# argparse raises SystemExit after printing help
except SystemExit:
pass
# prints the same help text as the bare command above
out, _ = capfd.readouterr()
assert "A CLI tool to manage runs for prompt flow." in out
def test_unknown_command(self, capfd):
try:
run_pf_command(
"unknown",
)
# argparse raises SystemExit on an invalid choice
except SystemExit:
pass
_, err = capfd.readouterr()
assert "invalid choice" in err
def test_config_set_user_agent(self) -> None:
run_pf_command(
"config",
"set",
"user_agent=test/1.0.0",
)
user_agent = setup_user_agent_to_operation_context(None)
ua_dict = parse_ua_to_dict(user_agent)
assert ua_dict.keys() == {"promptflow-sdk", "promptflow-cli", "PFCustomer_test"}
# clear user agent
run_pf_command(
"config",
"set",
"user_agent=",
)
context = OperationContext().get_instance()
context.user_agent = ""
def test_basic_flow_run_delete(self, monkeypatch, local_client, capfd) -> None:
input_list = ["y"]
def mock_input(*args, **kwargs):
if input_list:
return input_list.pop()
else:
raise KeyboardInterrupt()
monkeypatch.setattr("builtins.input", mock_input)
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--name",
run_id,
"--flow",
f"{FLOWS_DIR}/print_env_var",
"--data",
f"{DATAS_DIR}/env_var_names.jsonl",
)
out, _ = capfd.readouterr()
assert "Completed" in out
run_a = local_client.runs.get(name=run_id)
local_storage = LocalStorageOperations(run_a)
path_a = local_storage.path
assert os.path.exists(path_a)
# delete the run
run_pf_command(
"run",
"delete",
"--name",
f"{run_id}",
)
# the run is deleted and its folder is removed
assert not os.path.exists(path_a)
def test_basic_flow_run_delete_no_confirm(self, monkeypatch, local_client, capfd) -> None:
run_id = str(uuid.uuid4())
run_pf_command(
"run",
"create",
"--name",
run_id,
"--flow",
f"{FLOWS_DIR}/print_env_var",
"--data",
f"{DATAS_DIR}/env_var_names.jsonl",
)
out, _ = capfd.readouterr()
assert "Completed" in out
run_a = local_client.runs.get(name=run_id)
local_storage = LocalStorageOperations(run_a)
path_a = local_storage.path
assert os.path.exists(path_a)
# delete the run
run_pf_command("run", "delete", "--name", f"{run_id}", "-y")
# the run is deleted and its folder is removed
assert not os.path.exists(path_a)
def test_basic_flow_run_delete_error(self, monkeypatch) -> None:
input_list = ["y"]
def mock_input(*args, **kwargs):
if input_list:
return input_list.pop()
else:
raise KeyboardInterrupt()
monkeypatch.setattr("builtins.input", mock_input)
run_id = str(uuid.uuid4())
# delete the run
with pytest.raises(SystemExit):
run_pf_command(
"run",
"delete",
"--name",
f"{run_id}",
)
def test_experiment_hide_by_default(self, monkeypatch, capfd):
# experiment commands are hidden if the config is not set
with pytest.raises(SystemExit):
run_pf_command(
"experiment",
"create",
"--template",
f"{EXPERIMENT_DIR}/basic-no-script-template/basic.exp.yaml",
)
@pytest.mark.usefixtures("setup_experiment_table")
def test_experiment_start(self, monkeypatch, capfd, local_client):
with mock.patch("promptflow._sdk._configuration.Configuration.is_internal_features_enabled") as mock_func:
mock_func.return_value = True
exp_name = str(uuid.uuid4())
run_pf_command(
"experiment",
"create",
"--template",
f"{EXPERIMENT_DIR}/basic-no-script-template/basic.exp.yaml",
"--name",
exp_name,
)
out, _ = capfd.readouterr()
assert exp_name in out
assert ExperimentStatus.NOT_STARTED in out
run_pf_command(
"experiment",
"start",
"--name",
exp_name,
)
out, _ = capfd.readouterr()
assert ExperimentStatus.TERMINATED in out
exp = local_client._experiments.get(name=exp_name)
assert len(exp.node_runs["main"]) > 0
assert len(exp.node_runs["eval"]) > 0
metrics = local_client.runs.get_metrics(name=exp.node_runs["eval"][0]["name"])
assert "accuracy" in metrics
def test_batch_run_timeout(self, local_client):
line_timeout_seconds = "54"
timeout_index = 9
p = multiprocessing.Process(
target=run_batch,
args=(local_client, line_timeout_seconds, timeout_index),
)
p.start()
p.join()
assert p.exitcode == 0
def test_batch_run_completed_within_the_required_time(self, local_client):
line_timeout_seconds = "600"
p = multiprocessing.Process(
target=run_batch,
args=(
local_client,
line_timeout_seconds,
),
)
p.start()
p.join()
assert p.exitcode == 0
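# Both timeout tests run run_batch in a child process so a hanging batch cannot
# block the pytest process; exitcode 0 means the child's own asserts passed.
# A hedged sketch of the pattern, assuming run_batch raises on failure:
#   p = multiprocessing.Process(target=run_batch, args=(local_client, "600"))
#   p.start(); p.join(); assert p.exitcode == 0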
def test_run_list(self, local_client):
from promptflow._sdk.entities import Run
with patch.object(Run, "_to_dict") as mock_to_dict:
mock_to_dict.side_effect = RuntimeError("mock exception")
run_pf_command(
"run",
"list",
)
def test_pf_flow_test_with_detail(self, tmpdir):
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
"url=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"answer=Channel",
"evidence=Url",
"--detail",
Path(tmpdir).as_posix(),
)
# when the `detail` parameter is specified, detail, output and log files are saved in both
# the specified folder and the ".promptflow" folder under the flow folder
for parent_folder in [
Path(FLOWS_DIR) / "web_classification" / ".promptflow",
Path(tmpdir),
]:
for filename in ["flow.detail.json", "flow.output.json", "flow.log"]:
path = parent_folder / filename
assert path.is_file()
def test_pf_flow_test_single_node_with_detail(self, tmpdir):
node_name = "fetch_text_content_from_url"
run_pf_command(
"flow",
"test",
"--flow",
f"{FLOWS_DIR}/web_classification",
"--inputs",
"inputs.url="
"https://www.microsoft.com/en-us/d/xbox-wireless-controller-stellar-shift-special-edition/94fbjc7h0h6h",
"--node",
node_name,
"--detail",
Path(tmpdir).as_posix(),
)
output_path = Path(FLOWS_DIR) / "web_classification" / ".promptflow" / f"flow-{node_name}.node.detail.json"
assert output_path.exists()
# when the `detail` parameter is specified, node detail, output and log files are saved in both
# the specified folder and the ".promptflow" folder under the flow folder
for parent_folder in [
Path(FLOWS_DIR) / "web_classification" / ".promptflow",
Path(tmpdir),
]:
for filename in [
f"flow-{node_name}.node.detail.json",
f"flow-{node_name}.node.output.json",
f"{node_name}.node.log",
]:
path = parent_folder / filename
assert path.is_file()
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_flow_run.py | import os
import shutil
import sys
import tempfile
import uuid
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from marshmallow import ValidationError
from pytest_mock import MockerFixture
from promptflow import PFClient
from promptflow._constants import PROMPTFLOW_CONNECTIONS
from promptflow._sdk._constants import (
FLOW_DIRECTORY_MACRO_IN_CONFIG,
PROMPT_FLOW_DIR_NAME,
FlowRunProperties,
LocalStorageFilenames,
RunStatus,
)
from promptflow._sdk._errors import (
ConnectionNotFoundError,
InvalidFlowError,
InvalidRunError,
InvalidRunStatusError,
RunExistsError,
RunNotFoundError,
)
from promptflow._sdk._load_functions import load_flow, load_run
from promptflow._sdk._run_functions import create_yaml_run
from promptflow._sdk._submitter.utils import SubmitterHelper
from promptflow._sdk._utils import _get_additional_includes
from promptflow._sdk.entities import Run
from promptflow._sdk.operations._local_storage_operations import LocalStorageOperations
from promptflow.connections import AzureOpenAIConnection
from promptflow.exceptions import UserErrorException
from ..recording_utilities import RecordStorage
PROMPTFLOW_ROOT = Path(__file__) / "../../../.."
TEST_ROOT = Path(__file__).parent.parent.parent
MODEL_ROOT = TEST_ROOT / "test_configs/e2e_samples"
CONNECTION_FILE = (PROMPTFLOW_ROOT / "connections.json").resolve().absolute().as_posix()
FLOWS_DIR = "./tests/test_configs/flows"
EAGER_FLOWS_DIR = "./tests/test_configs/eager_flows"
RUNS_DIR = "./tests/test_configs/runs"
DATAS_DIR = "./tests/test_configs/datas"
def create_run_against_multi_line_data(client) -> Run:
return client.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification3.jsonl",
column_mapping={"url": "${data.url}"},
)
def create_run_against_multi_line_data_without_llm(client: PFClient) -> Run:
return client.run(
flow=f"{FLOWS_DIR}/print_env_var",
data=f"{DATAS_DIR}/env_var_names.jsonl",
)
def create_run_against_run(client, run: Run) -> Run:
return client.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
data=f"{DATAS_DIR}/webClassification3.jsonl",
run=run.name,
column_mapping={
"groundtruth": "${data.answer}",
"prediction": "${run.outputs.category}",
"variant_id": "${data.variant_id}",
},
)
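# Column-mapping recap for these helpers: "${data.<col>}" binds a column from
# the input jsonl, while "${run.outputs.<key>}" binds an output of the
# referenced run. A hedged usage sketch:
#   base = create_run_against_multi_line_data(pf)
#   eval_run = create_run_against_run(pf, base)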
def assert_run_with_invalid_column_mapping(client: PFClient, run: Run) -> None:
assert run.status == RunStatus.FAILED
with pytest.raises(InvalidRunStatusError):
client.stream(run.name)
local_storage = LocalStorageOperations(run)
assert os.path.exists(local_storage._exception_path)
exception = local_storage.load_exception()
assert "The input for batch run is incorrect. Couldn't find these mapping relations" in exception["message"]
assert exception["code"] == "UserError"
assert exception["innerError"]["innerError"]["code"] == "BulkRunException"
@pytest.mark.usefixtures(
"use_secrets_config_file", "recording_injection", "setup_local_connection", "install_custom_tool_pkg"
)
@pytest.mark.sdk_test
@pytest.mark.e2etest
class TestFlowRun:
def test_basic_flow_bulk_run(self, azure_open_ai_connection: AzureOpenAIConnection, pf) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
# Test repeated execution of the flow run
pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
pf.run(flow=f"{FLOWS_DIR}/web_classification_v1", data=data_path)
pf.run(flow=f"{FLOWS_DIR}/web_classification_v2", data=data_path)
# TODO: check details
# df = pf.show_details(baseline, v1, v2)
def test_basic_run_bulk(self, azure_open_ai_connection: AzureOpenAIConnection, local_client, pf):
result = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"url": "${data.url}"},
)
local_storage = LocalStorageOperations(result)
detail = local_storage.load_detail()
tuning_node = next((x for x in detail["node_runs"] if x["node"] == "summarize_text_content"), None)
# used default variant config
assert tuning_node["inputs"]["temperature"] == 0.3
assert "variant_0" in result.name
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
# write to user_dir/.promptflow/.runs
assert ".promptflow" in run.properties["output_path"]
def test_local_storage_delete(self, pf):
result = pf.run(flow=f"{FLOWS_DIR}/print_env_var", data=f"{DATAS_DIR}/env_var_names.jsonl")
local_storage = LocalStorageOperations(result)
local_storage.delete()
assert not os.path.exists(local_storage._outputs_path)
def test_flow_run_delete(self, pf):
result = pf.run(flow=f"{FLOWS_DIR}/print_env_var", data=f"{DATAS_DIR}/env_var_names.jsonl")
local_storage = LocalStorageOperations(result)
output_path = local_storage.path
# delete new created run by name
pf.runs.delete(result.name)
# check folders and dbs are deleted
assert not os.path.exists(output_path)
from promptflow._sdk._orm import RunInfo as ORMRun
pytest.raises(RunNotFoundError, lambda: ORMRun.get(result.name))
pytest.raises(RunNotFoundError, lambda: pf.runs.get(result.name))
def test_flow_run_delete_fake_id_raise(self, pf: PFClient):
run = "fake_run_id"
# delete new created run by name
pytest.raises(RunNotFoundError, lambda: pf.runs.delete(name=run))
@pytest.mark.skipif(sys.platform == "win32", reason="Windows doesn't support chmod, just test permission errors")
def test_flow_run_delete_invalid_permission_raise(self, pf: PFClient):
result = pf.run(flow=f"{FLOWS_DIR}/print_env_var", data=f"{DATAS_DIR}/env_var_names.jsonl")
local_storage = LocalStorageOperations(result)
output_path = local_storage.path
os.chmod(output_path, 0o555)
# delete new created run by name
pytest.raises(InvalidRunError, lambda: pf.runs.delete(name=result.name))
# Change folder permission back
os.chmod(output_path, 0o755)
pf.runs.delete(name=result.name)
assert not os.path.exists(output_path)
def test_visualize_run_with_referenced_run_deleted(self, pf: PFClient):
run_id = str(uuid.uuid4())
run = load_run(
source=f"{RUNS_DIR}/sample_bulk_run.yaml",
params_override=[{"name": run_id}],
)
run_a = pf.runs.create_or_update(run=run)
local_storage_a = LocalStorageOperations(run_a)
output_path_a = local_storage_a.path
run = load_run(source=f"{RUNS_DIR}/sample_eval_run.yaml", params_override=[{"run": run_id}])
run_b = pf.runs.create_or_update(run=run)
local_storage_b = LocalStorageOperations(run_b)
output_path_b = local_storage_b.path
pf.runs.delete(run_a.name)
assert not os.path.exists(output_path_a)
assert os.path.exists(output_path_b)
# visualize doesn't raise error
pf.runs.visualize(run_b.name)
def test_basic_flow_with_variant(self, azure_open_ai_connection: AzureOpenAIConnection, local_client, pf) -> None:
result = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"url": "${data.url}"},
variant="${summarize_text_content.variant_0}",
)
local_storage = LocalStorageOperations(result)
detail = local_storage.load_detail()
tuning_node = next((x for x in detail["node_runs"] if x["node"] == "summarize_text_content"), None)
assert "variant_0" in result.name
# used variant_0 config
assert tuning_node["inputs"]["temperature"] == 0.2
result = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"url": "${data.url}"},
variant="${summarize_text_content.variant_1}",
)
local_storage = LocalStorageOperations(result)
detail = local_storage.load_detail()
tuning_node = next((x for x in detail["node_runs"] if x["node"] == "summarize_text_content"), None)
assert "variant_1" in result.name
# used variant_1 config
assert tuning_node["inputs"]["temperature"] == 0.3
def test_run_bulk_error(self, pf):
# path does not exist
with pytest.raises(FileNotFoundError) as e:
pf.run(
flow=f"{MODEL_ROOT}/not_exist",
data=f"{DATAS_DIR}/webClassification3.jsonl",
column_mapping={"question": "${data.question}", "context": "${data.context}"},
variant="${summarize_text_content.variant_0}",
)
assert "not exist" in str(e.value)
# tuning_node does not exist
with pytest.raises(InvalidFlowError) as e:
pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification3.jsonl",
column_mapping={"question": "${data.question}", "context": "${data.context}"},
variant="${not_exist.variant_0}",
)
assert "Node not_exist not found in flow" in str(e.value)
# invalid variant format
with pytest.raises(UserErrorException) as e:
pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification3.jsonl",
column_mapping={"question": "${data.question}", "context": "${data.context}"},
variant="v",
)
assert "Invalid variant format: v, variant should be in format of ${TUNING_NODE.VARIANT}" in str(e.value)
def test_basic_evaluation(self, azure_open_ai_connection: AzureOpenAIConnection, local_client, pf):
result = pf.run(
flow=f"{FLOWS_DIR}/print_env_var",
data=f"{DATAS_DIR}/env_var_names.jsonl",
)
assert local_client.runs.get(result.name).status == "Completed"
eval_result = pf.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
run=result.name,
column_mapping={
"prediction": "${run.outputs.output}",
# evaluation references run.inputs
# NOTE: we need this value to guard behavior when a run references another run's inputs
"variant_id": "${run.inputs.key}",
# can reference other columns in data which don't exist in the base run's inputs
"groundtruth": "${run.inputs.extra_key}",
},
)
assert local_client.runs.get(eval_result.name).status == "Completed"
def test_flow_demo(self, azure_open_ai_connection, pf):
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
column_mapping = {
"groundtruth": "${data.answer}",
"prediction": "${run.outputs.category}",
"variant_id": "${data.variant_id}",
}
metrics = {}
for flow_name, output_key in [
("web_classification", "baseline"),
("web_classification_v1", "v1"),
("web_classification_v2", "v2"),
]:
v = pf.run(flow=f"{FLOWS_DIR}/{flow_name}", data=data_path)
metrics[output_key] = pf.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
data=data_path,
run=v,
column_mapping=column_mapping,
)
def test_submit_run_from_yaml(self, local_client, pf):
run_id = str(uuid.uuid4())
run = create_yaml_run(source=f"{RUNS_DIR}/sample_bulk_run.yaml", params_override=[{"name": run_id}])
assert local_client.runs.get(run.name).status == "Completed"
eval_run = create_yaml_run(
source=f"{RUNS_DIR}/sample_eval_run.yaml",
params_override=[{"run": run_id}],
)
assert local_client.runs.get(eval_run.name).status == "Completed"
@pytest.mark.usefixtures("enable_logger_propagate")
def test_submit_run_with_extra_params(self, pf, caplog):
run_id = str(uuid.uuid4())
run = create_yaml_run(source=f"{RUNS_DIR}/extra_field.yaml", params_override=[{"name": run_id}])
assert pf.runs.get(run.name).status == "Completed"
assert "Run schema validation warnings. Unknown fields found" in caplog.text
def test_run_with_connection(self, local_client, local_aoai_connection, pf):
# remove connection file to test connection resolving
os.environ.pop(PROMPTFLOW_CONNECTIONS)
result = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"url": "${data.url}"},
)
local_storage = LocalStorageOperations(result)
detail = local_storage.load_detail()
tuning_node = next((x for x in detail["node_runs"] if x["node"] == "summarize_text_content"), None)
# used default variant config
assert tuning_node["inputs"]["temperature"] == 0.3
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
def test_run_with_connection_overwrite(self, local_client, local_aoai_connection, local_alt_aoai_connection, pf):
result = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification1.jsonl",
connections={"classify_with_llm": {"connection": "new_ai_connection"}},
)
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
def test_custom_connection_overwrite(self, local_client, local_custom_connection, pf):
result = pf.run(
flow=f"{FLOWS_DIR}/custom_connection_flow",
data=f"{DATAS_DIR}/env_var_names.jsonl",
connections={"print_env": {"connection": "test_custom_connection"}},
)
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
# overwrite a non-existent connection
with pytest.raises(InvalidFlowError) as e:
pf.run(
flow=f"{FLOWS_DIR}/custom_connection_flow",
data=f"{DATAS_DIR}/env_var_names.jsonl",
connections={"print_env": {"new_connection": "test_custom_connection"}},
)
assert "Connection with name new_connection not found" in str(e.value)
def test_basic_flow_with_package_tool_with_custom_strong_type_connection(
self, install_custom_tool_pkg, local_client, pf
):
result = pf.run(
flow=f"{FLOWS_DIR}/flow_with_package_tool_with_custom_strong_type_connection",
data=f"{FLOWS_DIR}/flow_with_package_tool_with_custom_strong_type_connection/data.jsonl",
connections={"My_First_Tool_00f8": {"connection": "custom_strong_type_connection"}},
)
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
def test_basic_flow_with_script_tool_with_custom_strong_type_connection(
self, install_custom_tool_pkg, local_client, pf
):
# Prepare custom connection
from promptflow.connections import CustomConnection
conn = CustomConnection(name="custom_connection_2", secrets={"api_key": "test"}, configs={"api_url": "test"})
local_client.connections.create_or_update(conn)
result = pf.run(
flow=f"{FLOWS_DIR}/flow_with_script_tool_with_custom_strong_type_connection",
data=f"{FLOWS_DIR}/flow_with_script_tool_with_custom_strong_type_connection/data.jsonl",
)
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
def test_run_with_connection_overwrite_non_exist(self, local_client, local_aoai_connection, pf):
# overwrite a non-existent connection
with pytest.raises(ConnectionNotFoundError):
pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=f"{DATAS_DIR}/webClassification1.jsonl",
connections={"classify_with_llm": {"connection": "Not_exist"}},
)
def test_run_reference_failed_run(self, pf):
failed_run = pf.run(
flow=f"{FLOWS_DIR}/failed_flow",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"text": "${data.url}"},
)
# "update" run status to failed since currently all run will be completed unless there's bug
pf.runs.update(
name=failed_run.name,
status="Failed",
)
run_name = str(uuid.uuid4())
with pytest.raises(UserErrorException) as e:
pf.run(
name=run_name,
flow=f"{FLOWS_DIR}/custom_connection_flow",
run=failed_run,
connections={"print_env": {"connection": "test_custom_connection"}},
)
assert "is not completed, got status" in str(e.value)
# run should not be created
with pytest.raises(RunNotFoundError):
pf.runs.get(name=run_name)
def test_referenced_output_not_exist(self, pf: PFClient) -> None:
# failed run won't generate output
failed_run = pf.run(
flow=f"{FLOWS_DIR}/failed_flow",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"text": "${data.url}"},
)
run_name = str(uuid.uuid4())
run = pf.run(
name=run_name,
run=failed_run,
flow=f"{FLOWS_DIR}/failed_flow",
column_mapping={"text": "${run.outputs.text}"},
)
assert_run_with_invalid_column_mapping(pf, run)
def test_connection_overwrite_file(self, local_client, local_aoai_connection):
run = create_yaml_run(
source=f"{RUNS_DIR}/run_with_connections.yaml",
)
run = local_client.runs.get(name=run.name)
assert run.status == "Completed"
def test_connection_overwrite_model(self, local_client, local_aoai_connection):
run = create_yaml_run(
source=f"{RUNS_DIR}/run_with_connections_model.yaml",
)
run = local_client.runs.get(name=run.name)
assert run.status == "Completed"
def test_resolve_connection(self, local_client, local_aoai_connection):
flow = load_flow(f"{FLOWS_DIR}/web_classification_no_variants")
connections = SubmitterHelper.resolve_connections(flow, local_client)
assert local_aoai_connection.name in connections
def test_run_with_env_overwrite(self, local_client, local_aoai_connection):
run = create_yaml_run(
source=f"{RUNS_DIR}/run_with_env.yaml",
)
outputs = local_client.runs._get_outputs(run=run)
assert outputs["output"][0] == local_aoai_connection.api_base
def test_pf_run_with_env_overwrite(self, local_client, local_aoai_connection, pf):
run = pf.run(
flow=f"{FLOWS_DIR}/print_env_var",
data=f"{DATAS_DIR}/env_var_names.jsonl",
environment_variables={"API_BASE": "${azure_open_ai_connection.api_base}"},
)
outputs = local_client.runs._get_outputs(run=run)
assert outputs["output"][0] == local_aoai_connection.api_base
def test_eval_run_not_exist(self, pf):
name = str(uuid.uuid4())
with pytest.raises(RunNotFoundError) as e:
pf.runs.create_or_update(
run=Run(
name=name,
flow=Path(f"{FLOWS_DIR}/classification_accuracy_evaluation"),
run="not_exist",
column_mapping={
"groundtruth": "${data.answer}",
"prediction": "${run.outputs.category}",
# evaluation references run.inputs
"url": "${run.inputs.url}",
},
)
)
assert "Run name 'not_exist' cannot be found" in str(e.value)
# run should not be created
with pytest.raises(RunNotFoundError):
pf.runs.get(name=name)
def test_eval_run_data_deleted(self, pf):
with tempfile.TemporaryDirectory() as temp_dir:
shutil.copy(f"{DATAS_DIR}/env_var_names.jsonl", temp_dir)
result = pf.run(
flow=f"{FLOWS_DIR}/print_env_var",
data=f"{temp_dir}/env_var_names.jsonl",
)
assert pf.runs.get(result.name).status == "Completed"
# delete original run's input data
os.remove(f"{temp_dir}/env_var_names.jsonl")
with pytest.raises(UserErrorException) as e:
pf.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
run=result.name,
column_mapping={
"prediction": "${run.outputs.output}",
# evaluation references run.inputs
# NOTE: we need this value to guard behavior when a run references another run's inputs
"variant_id": "${run.inputs.key}",
# can reference other columns in data which don't exist in the base run's inputs
"groundtruth": "${run.inputs.extra_key}",
},
)
assert "Please make sure it exists and not deleted." in str(e.value)
def test_eval_run_data_not_exist(self, pf):
base_run = pf.run(
flow=f"{FLOWS_DIR}/print_env_var",
data=f"{DATAS_DIR}/env_var_names.jsonl",
)
assert pf.runs.get(base_run.name).status == "Completed"
eval_run = pf.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
run=base_run.name,
column_mapping={
"prediction": "${run.outputs.output}",
# evaluation references run.inputs
# NOTE: we need this value to guard behavior when a run references another run's inputs
"variant_id": "${run.inputs.key}",
# can reference other columns in data which don't exist in the base run's inputs
"groundtruth": "${run.inputs.extra_key}",
},
)
result = pf.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
run=eval_run.name,
column_mapping={
"prediction": "${run.outputs.output}",
# evaluation references run.inputs
# NOTE: we need this value to guard behavior when a run references another run's inputs
"variant_id": "${run.inputs.key}",
# can reference other columns in data which don't exist in the base run's inputs
"groundtruth": "${run.inputs.extra_key}",
},
)
# The run fails because the referenced run's inputs data is None; the error will be in the run output's error.json
assert result.status == "Failed"
def test_create_run_with_tags(self, pf):
name = str(uuid.uuid4())
display_name = "test_run_with_tags"
tags = {"key1": "tag1"}
run = pf.run(
name=name,
display_name=display_name,
tags=tags,
flow=f"{FLOWS_DIR}/print_env_var",
data=f"{DATAS_DIR}/env_var_names.jsonl",
environment_variables={"API_BASE": "${azure_open_ai_connection.api_base}"},
)
assert run.name == name
assert "test_run_with_tags" == run.display_name
assert run.tags == tags
def test_run_display_name(self, pf):
# use run name if not specify display_name
run = pf.runs.create_or_update(
run=Run(
flow=Path(f"{FLOWS_DIR}/print_env_var"),
data=f"{DATAS_DIR}/env_var_names.jsonl",
environment_variables={"API_BASE": "${azure_open_ai_connection.api_base}"},
)
)
assert run.display_name == run.name
assert "print_env_var" in run.display_name
# will respect if specified in run
base_run = pf.runs.create_or_update(
run=Run(
flow=Path(f"{FLOWS_DIR}/print_env_var"),
data=f"{DATAS_DIR}/env_var_names.jsonl",
environment_variables={"API_BASE": "${azure_open_ai_connection.api_base}"},
display_name="my_run",
)
)
assert base_run.display_name == "my_run"
run = pf.runs.create_or_update(
run=Run(
flow=Path(f"{FLOWS_DIR}/print_env_var"),
data=f"{DATAS_DIR}/env_var_names.jsonl",
environment_variables={"API_BASE": "${azure_open_ai_connection.api_base}"},
display_name="my_run_${variant_id}_${run}",
run=base_run,
)
)
assert run.display_name == f"my_run_variant_0_{base_run.name}"
run = pf.runs.create_or_update(
run=Run(
flow=Path(f"{FLOWS_DIR}/print_env_var"),
data=f"{DATAS_DIR}/env_var_names.jsonl",
environment_variables={"API_BASE": "${azure_open_ai_connection.api_base}"},
display_name="my_run_${timestamp}",
run=base_run,
)
)
assert "${timestamp}" not in run.display_name
def test_run_dump(self, azure_open_ai_connection: AzureOpenAIConnection, pf) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
# in fact, `pf.run` will internally query the run from db in `RunSubmitter`
# explicitly call ORM get here to emphasize the dump operation
# if no dump operation, a RunNotFoundError will be raised here
pf.runs.get(run.name)
def test_run_list(self, azure_open_ai_connection: AzureOpenAIConnection, pf) -> None:
# create a run to ensure there is at least one run in the db
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
# `max_result` is not specified here, so that if there are legacy runs in the db,
# the list runs API can collect them and somehow cover the legacy schema
runs = pf.runs.list()
assert len(runs) >= 1
def test_stream_run_summary(self, azure_open_ai_connection: AzureOpenAIConnection, local_client, capfd, pf) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(flow=f"{FLOWS_DIR}/web_classification", data=data_path)
local_client.runs.stream(run.name)
out, _ = capfd.readouterr()
print(out)
assert 'Run status: "Completed"' in out
assert "Output path: " in out
def test_stream_incomplete_run_summary(
self, azure_open_ai_connection: AzureOpenAIConnection, local_client, capfd, pf
) -> None:
# use wrong data to create a failed run
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
name = str(uuid.uuid4())
run = pf.run(
flow=f"{FLOWS_DIR}/failed_flow",
data=data_path,
column_mapping={"text": "${data.url}"},
name=name,
)
local_client.runs.stream(run.name)
# assert error message in stream API
out, _ = capfd.readouterr()
assert 'Run status: "Completed"' in out
# won't print exception; user can get it from run._to_dict()
# assert "failed with exception" in out
def test_run_data_not_provided(self, pf):
with pytest.raises(ValueError) as e:
pf.run(
flow=f"{FLOWS_DIR}/web_classification",
)
assert "at least one of data or run must be provided" in str(e)
def test_get_details(self, azure_open_ai_connection: AzureOpenAIConnection, pf) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=data_path,
column_mapping={"url": "${data.url}"},
)
from promptflow._sdk.operations._local_storage_operations import LocalStorageOperations
local_storage = LocalStorageOperations(run)
# there should be line_number in the original DataFrame, but not in the details DataFrame,
# as we will set the index on line_number to ensure the order
outputs = pd.read_json(local_storage._outputs_path, orient="records", lines=True)
details = pf.get_details(run)
assert "line_number" in outputs and "line_number" not in details
def test_visualize_run(self, azure_open_ai_connection: AzureOpenAIConnection, pf) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run1 = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=data_path,
column_mapping={"url": "${data.url}"},
)
run2 = pf.run(
flow=f"{FLOWS_DIR}/classification_accuracy_evaluation",
data=data_path,
run=run1.name,
column_mapping={
"groundtruth": "${data.answer}",
"prediction": "${run.outputs.category}",
"variant_id": "${data.variant_id}",
},
)
pf.visualize([run1, run2])
def test_incomplete_run_visualize(
self, azure_open_ai_connection: AzureOpenAIConnection, pf, capfd: pytest.CaptureFixture
) -> None:
failed_run = pf.run(
flow=f"{FLOWS_DIR}/failed_flow",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"text": "${data.url}"},
)
# "update" run status to failed since currently all run will be completed unless there's bug
pf.runs.update(
name=failed_run.name,
status="Failed",
)
# patch logger.error to print, so that we can capture the error message using capfd
from promptflow._sdk.operations import _run_operations
_run_operations.logger.error = print
pf.visualize(failed_run)
captured = capfd.readouterr()
expected_error_message = (
f"Cannot visualize non-completed run. Run {failed_run.name!r} is not completed, the status is 'Failed'."
)
assert expected_error_message in captured.out
def test_flow_bulk_run_with_additional_includes(self, azure_open_ai_connection: AzureOpenAIConnection, pf):
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(flow=f"{FLOWS_DIR}/web_classification_with_additional_include", data=data_path)
additional_includes = _get_additional_includes(run.flow / "flow.dag.yaml")
snapshot_path = Path.home() / ".promptflow" / ".runs" / run.name / "snapshot"
for item in additional_includes:
assert (snapshot_path / Path(item).name).exists()
# Additional includes in the snapshot are removed
additional_includes = _get_additional_includes(snapshot_path / "flow.dag.yaml")
assert not additional_includes
def test_input_mapping_source_not_found_error(self, azure_open_ai_connection: AzureOpenAIConnection, pf):
# input_mapping source not found error won't create run
name = str(uuid.uuid4())
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=data_path,
column_mapping={"not_exist": "${data.not_exist_key}"},
name=name,
)
assert_run_with_invalid_column_mapping(pf, run)
def test_input_mapping_with_dict(self, azure_open_ai_connection: AzureOpenAIConnection, pf):
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(
flow=f"{FLOWS_DIR}/flow_with_dict_input",
data=data_path,
column_mapping={"key": {"value": "1"}, "url": "${data.url}"},
)
outputs = pf.runs._get_outputs(run=run)
assert "dict" in outputs["output"][0]
def test_run_exist_error(self, pf):
name = str(uuid.uuid4())
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
pf.run(
name=name,
flow=f"{FLOWS_DIR}/flow_with_dict_input",
data=data_path,
column_mapping={"key": {"value": "1"}, "url": "${data.url}"},
)
# create a new run won't affect original run
with pytest.raises(RunExistsError):
pf.run(
name=name,
flow=f"{FLOWS_DIR}/flow_with_dict_input",
data=data_path,
column_mapping={"key": {"value": "1"}, "url": "${data.url}"},
)
run = pf.runs.get(name)
assert run.status == RunStatus.COMPLETED
assert not os.path.exists(run._output_path / LocalStorageFilenames.EXCEPTION)
def test_run_local_storage_structure(self, local_client, pf) -> None:
run = create_run_against_multi_line_data(pf)
local_storage = LocalStorageOperations(local_client.runs.get(run.name))
run_output_path = local_storage.path
assert (Path(run_output_path) / "flow_outputs").is_dir()
assert (Path(run_output_path) / "flow_outputs" / "output.jsonl").is_file()
assert (Path(run_output_path) / "flow_artifacts").is_dir()
# 3 line runs for webClassification3.jsonl
assert len([_ for _ in (Path(run_output_path) / "flow_artifacts").iterdir()]) == 3
assert (Path(run_output_path) / "node_artifacts").is_dir()
# 5 nodes in the web classification flow DAG
assert len([_ for _ in (Path(run_output_path) / "node_artifacts").iterdir()]) == 5
def test_run_snapshot_with_flow_tools_json(self, local_client, pf) -> None:
run = create_run_against_multi_line_data(pf)
local_storage = LocalStorageOperations(local_client.runs.get(run.name))
assert (local_storage._snapshot_folder_path / ".promptflow").is_dir()
assert (local_storage._snapshot_folder_path / ".promptflow" / "flow.tools.json").is_file()
def test_get_metrics_format(self, local_client, pf) -> None:
run1 = create_run_against_multi_line_data(pf)
run2 = create_run_against_run(pf, run1)
# ensure the result is a flatten dict
assert local_client.runs.get_metrics(run2.name).keys() == {"accuracy"}
def test_get_detail_format(self, local_client, pf) -> None:
run = create_run_against_multi_line_data(pf)
# data is a jsonl file, so we can know the number of line runs
with open(f"{DATAS_DIR}/webClassification3.jsonl", "r") as f:
data = f.readlines()
number_of_lines = len(data)
local_storage = LocalStorageOperations(local_client.runs.get(run.name))
detail = local_storage.load_detail()
assert isinstance(detail, dict)
# flow runs
assert "flow_runs" in detail
assert isinstance(detail["flow_runs"], list)
assert len(detail["flow_runs"]) == number_of_lines
# node runs
assert "node_runs" in detail
assert isinstance(detail["node_runs"], list)
def test_run_logs(self, pf):
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(
flow=f"{FLOWS_DIR}/flow_with_user_output",
data=data_path,
column_mapping={"key": {"value": "1"}, "url": "${data.url}"},
)
local_storage = LocalStorageOperations(run=run)
logs = local_storage.logger.get_logs()
# For a batch run, the executor uses the bulk logger to print logs, and only prints the error logs of the nodes.
existing_keywords = ["execution", "execution.bulk", "WARNING", "error log"]
assert all([keyword in logs for keyword in existing_keywords])
non_existing_keywords = ["execution.flow", "user log"]
assert all([keyword not in logs for keyword in non_existing_keywords])
def test_get_detail_against_partial_fail_run(self, pf) -> None:
run = pf.run(
flow=f"{FLOWS_DIR}/partial_fail",
data=f"{FLOWS_DIR}/partial_fail/data.jsonl",
)
detail = pf.runs.get_details(name=run.name)
detail.fillna("", inplace=True)
assert len(detail) == 3
def test_flow_with_only_static_values(self, pf):
name = str(uuid.uuid4())
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
with pytest.raises(UserErrorException) as e:
pf.run(
flow=f"{FLOWS_DIR}/flow_with_dict_input",
data=data_path,
column_mapping={"key": {"value": "1"}},
name=name,
)
assert "Column mapping must contain at least one mapping binding" in str(e.value)
# run should not be created
with pytest.raises(RunNotFoundError):
pf.runs.get(name=name)
def test_error_message_dump(self, pf):
failed_run = pf.run(
flow=f"{FLOWS_DIR}/failed_flow",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"text": "${data.url}"},
)
# even if all lines failed, the bulk run's status is completed.
assert failed_run.status == "Completed"
# error messages will store in local
local_storage = LocalStorageOperations(failed_run)
assert os.path.exists(local_storage._exception_path)
exception = local_storage.load_exception()
assert "Failed to run 1/1 lines. First error message is" in exception["message"]
# line run failures will be stored in additionalInfo
assert len(exception["additionalInfo"][0]["info"]["errors"]) == 1
# show run will get error message
run = pf.runs.get(name=failed_run.name)
run_dict = run._to_dict()
assert "error" in run_dict
assert run_dict["error"] == exception
@pytest.mark.skipif(RecordStorage.is_replaying_mode(), reason="System metrics not supported in replaying mode")
def test_system_metrics_in_properties(self, pf) -> None:
run = create_run_against_multi_line_data(pf)
assert FlowRunProperties.SYSTEM_METRICS in run.properties
assert isinstance(run.properties[FlowRunProperties.SYSTEM_METRICS], dict)
assert "total_tokens" in run.properties[FlowRunProperties.SYSTEM_METRICS]
def test_run_get_inputs(self, pf):
# inputs should be persisted when defaults are used
run = pf.run(
flow=f"{FLOWS_DIR}/default_input",
data=f"{DATAS_DIR}/webClassification1.jsonl",
)
inputs = pf.runs._get_inputs(run=run)
assert inputs == {
"line_number": [0],
"input_bool": [False],
"input_dict": [{}],
"input_list": [[]],
"input_str": ["input value from default"],
}
# inputs should be persisted when data value are used
run = pf.run(
flow=f"{FLOWS_DIR}/flow_with_dict_input",
data=f"{DATAS_DIR}/dictInput1.jsonl",
)
inputs = pf.runs._get_inputs(run=run)
assert inputs == {"key": [{"key": "value in data"}], "line_number": [0]}
# inputs should be persisted when column-mapping are used
run = pf.run(
flow=f"{FLOWS_DIR}/flow_with_dict_input",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"key": {"value": "value in column-mapping"}, "url": "${data.url}"},
)
inputs = pf.runs._get_inputs(run=run)
assert inputs == {
"key": [{"value": "value in column-mapping"}],
"line_number": [0],
"url": ["https://www.youtube.com/watch?v=o5ZQyXaAv1g"],
}
def test_executor_logs_in_batch_run_logs(self, pf) -> None:
run = create_run_against_multi_line_data_without_llm(pf)
local_storage = LocalStorageOperations(run=run)
logs = local_storage.logger.get_logs()
# the below warning is printed by the executor before the batch run is executed;
# the warning message results from not using column mapping,
# so it is expected to be printed here
assert "Starting run without column mapping may lead to unexpected results." in logs
def test_basic_image_flow_bulk_run(self, pf, local_client) -> None:
image_flow_path = f"{FLOWS_DIR}/python_tool_with_simple_image"
data_path = f"{image_flow_path}/image_inputs/inputs.jsonl"
result = pf.run(flow=image_flow_path, data=data_path, column_mapping={"image": "${data.image}"})
run = local_client.runs.get(name=result.name)
assert run.status == "Completed"
assert "error" not in run._to_dict()
def test_python_tool_with_composite_image(self, pf) -> None:
image_flow_path = f"{FLOWS_DIR}/python_tool_with_composite_image"
data_path = f"{image_flow_path}/inputs.jsonl"
result = pf.run(
flow=image_flow_path,
data=data_path,
column_mapping={
"image_list": "${data.image_list}",
"image_dict": "${data.image_dict}",
},
)
run = pf.runs.get(name=result.name)
assert run.status == "Completed"
# no error when processing lines
assert "error" not in run._to_dict()
# test input from output
result = pf.run(
run=result,
flow=image_flow_path,
column_mapping={
"image_list": "${run.outputs.output}"
# image dict will use default value, which is relative to flow's folder
},
)
run = pf.runs.get(name=result.name)
assert run.status == "Completed"
# no error when processing lines
assert "error" not in run._to_dict()
def test_image_without_default(self, pf):
image_flow_path = f"{FLOWS_DIR}/python_tool_with_simple_image_without_default"
data_path = f"{DATAS_DIR}/image_inputs"
result = pf.run(
flow=image_flow_path,
data=data_path,
column_mapping={
"image_1": "${data.image}",
"image_2": "${data.image}",
},
)
run = pf.runs.get(name=result.name)
assert run.status == "Completed", run.name
# no error when processing lines
assert "error" not in run._to_dict(), run.name
def test_get_details_for_image_in_flow(self, pf) -> None:
image_flow_path = f"{FLOWS_DIR}/python_tool_with_simple_image"
data_path = f"{image_flow_path}/image_inputs/inputs.jsonl"
run = pf.run(
flow=image_flow_path,
data=data_path,
column_mapping={"image": "${data.image}"},
)
details = pf.get_details(run.name)
for i in range(len(details)):
input_image_path = details["inputs.image"][i]["data:image/png;path"]
assert Path(input_image_path).is_absolute()
output_image_path = details["outputs.output"][i]["data:image/png;path"]
assert Path(output_image_path).is_absolute()
def test_stream_raise_on_error_false(self, pf: PFClient, capfd: pytest.CaptureFixture) -> None:
data_path = f"{DATAS_DIR}/webClassification3.jsonl"
run = pf.run(
flow=f"{FLOWS_DIR}/web_classification",
data=data_path,
column_mapping={"not_exist": "${data.not_exist_key}"},
name=str(uuid.uuid4()),
)
# raise_on_error=False will print the error message to stdout
pf.stream(run.name, raise_on_error=False)
out, _ = capfd.readouterr()
assert "The input for batch run is incorrect. Couldn't find these mapping relations" in out
def test_stream_canceled_run(self, pf: PFClient, capfd: pytest.CaptureFixture) -> None:
run = create_run_against_multi_line_data_without_llm(pf)
pf.runs.update(name=run.name, status=RunStatus.CANCELED)
# (default) raise_on_error=True
with pytest.raises(InvalidRunStatusError):
pf.stream(run.name)
# raise_on_error=False
pf.stream(run.name, raise_on_error=False)
out, _ = capfd.readouterr()
assert "Run is canceled." in out
def test_specify_run_output_path(self, pf: PFClient, mocker: MockerFixture) -> None:
# mock to imitate a user specifying config run.output_path
specified_run_output_path = (Path.home() / PROMPT_FLOW_DIR_NAME / ".mock").resolve().as_posix()
with mocker.patch(
"promptflow._sdk._configuration.Configuration.get_run_output_path",
return_value=specified_run_output_path,
):
run = create_run_against_multi_line_data_without_llm(pf)
local_storage = LocalStorageOperations(run=run)
expected_output_path_prefix = (Path(specified_run_output_path) / run.name).resolve().as_posix()
assert local_storage.outputs_folder.as_posix().startswith(expected_output_path_prefix)
def test_override_run_output_path_in_pf_client(self) -> None:
specified_run_output_path = (Path.home() / PROMPT_FLOW_DIR_NAME / ".another_mock").resolve().as_posix()
pf = PFClient(config={"run.output_path": specified_run_output_path})
run = create_run_against_multi_line_data_without_llm(pf)
local_storage = LocalStorageOperations(run=run)
expected_output_path_prefix = (Path(specified_run_output_path) / run.name).resolve().as_posix()
assert local_storage.outputs_folder.as_posix().startswith(expected_output_path_prefix)
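# PFClient(config={...}) applies per-client configuration without touching the
# global pf.yaml; a hedged usage sketch with a hypothetical path:
#   pf = PFClient(config={"run.output_path": "/tmp/my_runs"})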
def test_specify_run_output_path_with_macro(self, pf: PFClient, mocker: MockerFixture) -> None:
# mock to imitate a user specifying config run.output_path with the flow directory macro
with mocker.patch(
"promptflow._sdk._configuration.Configuration.get_run_output_path",
return_value=f"{FLOW_DIRECTORY_MACRO_IN_CONFIG}/.promptflow",
):
for _ in range(3):
run = create_run_against_multi_line_data_without_llm(pf)
local_storage = LocalStorageOperations(run=run)
expected_path_prefix = Path(FLOWS_DIR) / "print_env_var" / ".promptflow" / run.name
expected_path_prefix = expected_path_prefix.resolve().as_posix()
assert local_storage.outputs_folder.as_posix().startswith(expected_path_prefix)
def test_specify_run_output_path_with_invalid_macro(self, pf: PFClient, mocker: MockerFixture) -> None:
# mock to imitate a user specifying an invalid config run.output_path
with mocker.patch(
"promptflow._sdk._configuration.Configuration.get_run_output_path",
# this case will happen when user manually modifies ~/.promptflow/pf.yaml
return_value=f"{FLOW_DIRECTORY_MACRO_IN_CONFIG}",
):
run = create_run_against_multi_line_data_without_llm(pf)
# as the specified run output path is invalid
# the actual run output path will be the default value
local_storage = LocalStorageOperations(run=run)
expected_output_path_prefix = (Path.home() / PROMPT_FLOW_DIR_NAME / ".runs" / run.name).resolve().as_posix()
assert local_storage.outputs_folder.as_posix().startswith(expected_output_path_prefix)
def test_failed_run_to_dict_exclude(self, pf):
failed_run = pf.run(
flow=f"{FLOWS_DIR}/failed_flow",
data=f"{DATAS_DIR}/webClassification1.jsonl",
column_mapping={"text": "${data.url}"},
)
default = failed_run._to_dict()
# CLI will exclude additional info and debug info
exclude = failed_run._to_dict(exclude_additional_info=True, exclude_debug_info=True)
assert "additionalInfo" in default["error"] and "additionalInfo" not in exclude["error"]
assert "debugInfo" in default["error"] and "debugInfo" not in exclude["error"]
def test_create_run_with_existing_run_folder(self, pf):
        # TODO: Should use a fixture to create a run and download it to be used here.
        run_name = "web_classification_variant_0_20231205_120253_104100"
        # clean up the run if it exists
from promptflow._cli._utils import _try_delete_existing_run_record
_try_delete_existing_run_record(run_name)
# assert the run doesn't exist
with pytest.raises(RunNotFoundError):
pf.runs.get(run_name)
# create the run with run folder
run_folder = f"{RUNS_DIR}/{run_name}"
run = Run._load_from_source(source=run_folder)
pf.runs.create_or_update(run)
# test with other local run operations
run = pf.runs.get(run_name)
assert run.name == run_name
details = pf.get_details(run_name)
assert details.shape == (3, 5)
metrics = pf.runs.get_metrics(run_name)
assert metrics == {}
pf.stream(run_name)
pf.visualize([run_name])
def test_aggregation_node_failed(self, pf):
failed_run = pf.run(
flow=f"{FLOWS_DIR}/aggregation_node_failed",
data=f"{FLOWS_DIR}/aggregation_node_failed/data.jsonl",
)
# even if all lines failed, the bulk run's status is completed.
assert failed_run.status == "Completed"
        # error messages will be stored locally
local_storage = LocalStorageOperations(failed_run)
assert os.path.exists(local_storage._exception_path)
exception = local_storage.load_exception()
assert "First error message is" in exception["message"]
# line run failures will be stored in additionalInfo
assert len(exception["additionalInfo"][0]["info"]["errors"]) == 1
        # getting the run will surface the error message
run = pf.runs.get(name=failed_run.name)
run_dict = run._to_dict()
assert "error" in run_dict
assert run_dict["error"] == exception
def test_get_details_against_partial_completed_run(self, pf: PFClient, monkeypatch) -> None:
        # TODO: remove this patch after the executor switches to spawn by default
monkeypatch.setenv("PF_BATCH_METHOD", "spawn")
flow_mod2 = f"{FLOWS_DIR}/mod-n/two"
flow_mod3 = f"{FLOWS_DIR}/mod-n/three"
data_path = f"{DATAS_DIR}/numbers.jsonl"
# batch run against data
run1 = pf.run(
flow=flow_mod2,
data=data_path,
column_mapping={"number": "${data.value}"},
)
pf.runs.stream(run1)
details1 = pf.get_details(run1)
assert len(details1) == 20
assert len(details1.loc[details1["outputs.output"] != "(Failed)"]) == 10
# assert to ensure inputs and outputs are aligned
for _, row in details1.iterrows():
if str(row["outputs.output"]) != "(Failed)":
assert int(row["inputs.number"]) == int(row["outputs.output"])
# batch run against previous run
run2 = pf.run(
flow=flow_mod3,
run=run1,
column_mapping={"number": "${run.outputs.output}"},
)
pf.runs.stream(run2)
details2 = pf.get_details(run2)
assert len(details2) == 10
assert len(details2.loc[details2["outputs.output"] != "(Failed)"]) == 4
# assert to ensure inputs and outputs are aligned
for _, row in details2.iterrows():
if str(row["outputs.output"]) != "(Failed)":
assert int(row["inputs.number"]) == int(row["outputs.output"])
monkeypatch.delenv("PF_BATCH_METHOD")
def test_flow_with_nan_inf(self, pf: PFClient, monkeypatch) -> None:
        # TODO: remove this patch after the executor switches to spawn by default
monkeypatch.setenv("PF_BATCH_METHOD", "spawn")
run = pf.run(
flow=f"{FLOWS_DIR}/flow-with-nan-inf",
data=f"{DATAS_DIR}/numbers.jsonl",
column_mapping={"number": "${data.value}"},
)
pf.stream(run)
local_storage = LocalStorageOperations(run=run)
# default behavior: no special logic for nan and inf
detail = local_storage.load_detail()
first_line_run_output = detail["flow_runs"][0]["output"]["output"]
assert isinstance(first_line_run_output["nan"], float)
assert np.isnan(first_line_run_output["nan"])
assert isinstance(first_line_run_output["inf"], float)
assert np.isinf(first_line_run_output["inf"])
        # handle nan and inf, which is a real scenario during visualization
detail = local_storage.load_detail(parse_const_as_str=True)
first_line_run_output = detail["flow_runs"][0]["output"]["output"]
assert isinstance(first_line_run_output["nan"], str)
assert first_line_run_output["nan"] == "NaN"
assert isinstance(first_line_run_output["inf"], str)
assert first_line_run_output["inf"] == "Infinity"
monkeypatch.delenv("PF_BATCH_METHOD")
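    # A minimal sketch (an assumption for illustration, not the promptflow
    # implementation) of the parse_const_as_str conversion asserted above:
    # non-finite floats become their JSON-friendly string forms.
    @staticmethod
    def _const_to_str_sketch(value):
        if isinstance(value, float):
            if np.isnan(value):
                return "NaN"
            if np.isinf(value):
                return "Infinity" if value > 0 else "-Infinity"
        return value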
@pytest.mark.skip("Enable this when executor change merges")
def test_eager_flow_run_without_yaml(self, pf):
# TODO(2898455): support this
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_without_yaml/entry.py")
run = pf.run(
flow=flow_path,
entry="my_flow",
data=f"{DATAS_DIR}/simple_eager_flow_data.jsonl",
)
assert run.status == "Completed"
def test_eager_flow_run_with_yaml(self, pf):
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_with_yaml")
run = pf.run(
flow=flow_path,
data=f"{DATAS_DIR}/simple_eager_flow_data.jsonl",
)
assert run.status == "Completed"
def test_eager_flow_test_invalid_cases(self, pf):
# no entry provided
flow_path = Path(f"{EAGER_FLOWS_DIR}/simple_without_yaml/entry.py")
with pytest.raises(UserErrorException) as e:
pf.run(
flow=flow_path,
data=f"{DATAS_DIR}/simple_eager_flow_data.jsonl",
)
assert "Entry function is not specified" in str(e.value)
# no path provided
flow_path = Path(f"{EAGER_FLOWS_DIR}/invalid_no_path/")
with pytest.raises(ValidationError) as e:
pf.run(
flow=flow_path,
data=f"{DATAS_DIR}/simple_eager_flow_data.jsonl",
)
assert "'path': ['Missing data for required field.']" in str(e.value)
def test_get_incomplete_run(self, local_client, pf) -> None:
with tempfile.TemporaryDirectory() as temp_dir:
shutil.copytree(f"{FLOWS_DIR}/print_env_var", f"{temp_dir}/print_env_var")
run = pf.run(
flow=f"{temp_dir}/print_env_var",
data=f"{DATAS_DIR}/env_var_names.jsonl",
)
# remove run dag
shutil.rmtree(f"{temp_dir}/print_env_var")
# can still get run operations
LocalStorageOperations(run=run)
            # can still convert to dict
run._to_dict()
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_flow_serve.py
import json
import os
import re
import pytest
from promptflow._core.operation_context import OperationContext
@pytest.mark.usefixtures("recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_swagger(flow_serving_client):
swagger_dict = json.loads(flow_serving_client.get("/swagger.json").data.decode())
assert swagger_dict == {
"components": {"securitySchemes": {"bearerAuth": {"scheme": "bearer", "type": "http"}}},
"info": {
"title": "Promptflow[basic-with-connection] API",
"version": "1.0.0",
"x-flow-name": "basic-with-connection",
},
"openapi": "3.0.0",
"paths": {
"/score": {
"post": {
"requestBody": {
"content": {
"application/json": {
"example": {"text": "Hello World!"},
"schema": {
"properties": {"text": {"type": "string"}},
"required": ["text"],
"type": "object",
},
}
},
"description": "promptflow input data",
"required": True,
},
"responses": {
"200": {
"content": {
"application/json": {
"schema": {"properties": {"output_prompt": {"type": "string"}}, "type": "object"}
}
},
"description": "successful operation",
},
"400": {"description": "Invalid input"},
"default": {"description": "unexpected error"},
},
"summary": "run promptflow: basic-with-connection with an given input",
}
}
},
"security": [{"bearerAuth": []}],
}
@pytest.mark.usefixtures("recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_chat_swagger(serving_client_llm_chat):
swagger_dict = json.loads(serving_client_llm_chat.get("/swagger.json").data.decode())
assert swagger_dict == {
"components": {"securitySchemes": {"bearerAuth": {"scheme": "bearer", "type": "http"}}},
"info": {
"title": "Promptflow[chat_flow_with_stream_output] API",
"version": "1.0.0",
"x-flow-name": "chat_flow_with_stream_output",
"x-chat-history": "chat_history",
"x-chat-input": "question",
"x-flow-type": "chat",
"x-chat-output": "answer",
},
"openapi": "3.0.0",
"paths": {
"/score": {
"post": {
"requestBody": {
"content": {
"application/json": {
"example": {},
"schema": {
"properties": {
"chat_history": {
"type": "array",
"items": {"type": "object", "additionalProperties": {}},
},
"question": {"type": "string", "default": "What is ChatGPT?"},
},
"required": ["chat_history", "question"],
"type": "object",
},
}
},
"description": "promptflow input data",
"required": True,
},
"responses": {
"200": {
"content": {
"application/json": {
"schema": {"properties": {"answer": {"type": "string"}}, "type": "object"}
}
},
"description": "successful operation",
},
"400": {"description": "Invalid input"},
"default": {"description": "unexpected error"},
},
"summary": "run promptflow: chat_flow_with_stream_output with an given input",
}
}
},
"security": [{"bearerAuth": []}],
}
@pytest.mark.usefixtures("recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_user_agent(flow_serving_client):
operation_context = OperationContext.get_instance()
assert "test-user-agent" in operation_context.get_user_agent()
assert "promptflow-local-serving" in operation_context.get_user_agent()
@pytest.mark.usefixtures("recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_serving_api(flow_serving_client):
response = flow_serving_client.get("/health")
assert b'{"status":"Healthy","version":"0.0.1"}' in response.data
response = flow_serving_client.get("/")
print(response.data)
assert response.status_code == 200
response = flow_serving_client.post("/score", data=json.dumps({"text": "hi"}))
assert (
response.status_code == 200
), f"Response code indicates error {response.status_code} - {response.data.decode()}"
assert "output_prompt" in json.loads(response.data.decode())
# Assert environment variable resolved
assert os.environ["API_TYPE"] == "azure"
@pytest.mark.usefixtures("recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_evaluation_flow_serving_api(evaluation_flow_serving_client):
response = evaluation_flow_serving_client.post("/score", data=json.dumps({"url": "https://www.microsoft.com/"}))
assert (
response.status_code == 200
), f"Response code indicates error {response.status_code} - {response.data.decode()}"
assert "category" in json.loads(response.data.decode())
@pytest.mark.e2etest
def test_unknown_api(flow_serving_client):
response = flow_serving_client.get("/unknown")
assert b"not supported by current app" in response.data
assert response.status_code == 404
response = flow_serving_client.post("/health") # health api should be GET
assert b"not supported by current app" in response.data
assert response.status_code == 404
@pytest.mark.usefixtures("recording_injection", "setup_local_connection")
@pytest.mark.e2etest
@pytest.mark.parametrize(
"accept, expected_status_code, expected_content_type",
[
("text/event-stream", 200, "text/event-stream; charset=utf-8"),
("text/html", 406, "application/json"),
("application/json", 200, "application/json"),
("*/*", 200, "application/json"),
("text/event-stream, application/json", 200, "text/event-stream; charset=utf-8"),
("application/json, */*", 200, "application/json"),
("", 200, "application/json"),
],
)
def test_stream_llm_chat(
serving_client_llm_chat,
accept,
expected_status_code,
expected_content_type,
):
payload = {
"question": "What is the capital of France?",
"chat_history": [],
}
headers = {
"Content-Type": "application/json",
"Accept": accept,
}
response = serving_client_llm_chat.post("/score", json=payload, headers=headers)
assert response.status_code == expected_status_code
assert response.content_type == expected_content_type
if response.status_code == 406:
assert response.json["error"]["code"] == "UserError"
assert (
f"Media type {accept} in Accept header is not acceptable. Supported media type(s) -"
in response.json["error"]["message"]
)
if "text/event-stream" in response.content_type:
for line in response.data.decode().split("\n"):
print(line)
else:
result = response.json
print(result)
@pytest.mark.e2etest
@pytest.mark.parametrize(
"accept, expected_status_code, expected_content_type",
[
("text/event-stream", 200, "text/event-stream; charset=utf-8"),
("text/html", 406, "application/json"),
("application/json", 200, "application/json"),
("*/*", 200, "application/json"),
("text/event-stream, application/json", 200, "text/event-stream; charset=utf-8"),
("application/json, */*", 200, "application/json"),
("", 200, "application/json"),
],
)
def test_stream_python_stream_tools(
serving_client_python_stream_tools,
accept,
expected_status_code,
expected_content_type,
):
payload = {
"text": "Hello World!",
}
headers = {
"Content-Type": "application/json",
"Accept": accept,
}
response = serving_client_python_stream_tools.post("/score", json=payload, headers=headers)
assert response.status_code == expected_status_code
assert response.content_type == expected_content_type
    # The predefined flow in this test case is an echo flow, which returns the input text.
    # Check the output to validate the test logic.
    # Stream generator logic (see the sketch after this test):
    # - The output is split into words, and each word is sent as a separate event
    # - Event data is a dict { $flow_output_field_name : $word }
    # - The event data is formatted as f"data: {json.dumps(data)}\n\n"
    # - The generator yields the event data for each word
if response.status_code == 200:
expected_output = f"Echo: {payload.get('text')}"
if "text/event-stream" in response.content_type:
words = expected_output.split()
lines = response.data.decode().split("\n\n")
# The last line is empty
lines = lines[:-1]
assert all(f"data: {json.dumps({'output_echo' : f'{w} '})}" == l for w, l in zip(words, lines))
else:
            # For a JSON response, the iterator is joined into a single string with "" as the delimiter
words = expected_output.split()
merged_text = "".join(word + " " for word in words)
expected_json = {"output_echo": merged_text}
result = response.json
assert expected_json == result
elif response.status_code == 406:
assert response.json["error"]["code"] == "UserError"
assert (
f"Media type {accept} in Accept header is not acceptable. Supported media type(s) -"
in response.json["error"]["message"]
)
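# A minimal sketch (an illustrative helper, not part of the promptflow serving
# implementation) of the word-by-word SSE formatting described in the comments of
# test_stream_python_stream_tools above:
def _echo_sse_events_sketch(output: str):
    for word in output.split():
        # one event per word; the payload keeps the trailing space the tests expect
        yield f"data: {json.dumps({'output_echo': f'{word} '})}\n\n"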
@pytest.mark.usefixtures("recording_injection")
@pytest.mark.e2etest
@pytest.mark.parametrize(
"accept, expected_status_code, expected_content_type",
[
("text/event-stream", 406, "application/json"),
("application/json", 200, "application/json"),
("*/*", 200, "application/json"),
("text/event-stream, application/json", 200, "application/json"),
("application/json, */*", 200, "application/json"),
("", 200, "application/json"),
],
)
def test_stream_python_nonstream_tools(
flow_serving_client,
accept,
expected_status_code,
expected_content_type,
):
payload = {
"text": "Hello World!",
}
headers = {
"Content-Type": "application/json",
"Accept": accept,
}
response = flow_serving_client.post("/score", json=payload, headers=headers)
if "text/event-stream" in response.content_type:
for line in response.data.decode().split("\n"):
print(line)
else:
result = response.json
print(result)
assert response.status_code == expected_status_code
assert response.content_type == expected_content_type
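# A minimal sketch (an assumption for illustration, not the actual promptflow
# serving implementation) of the Accept-header negotiation the two parametrized
# tables above encode: event-stream is used only when the flow output streams and
# the client accepts it; JSON covers json, */*, and an empty Accept; anything else is 406.
def _negotiate_content_type_sketch(accept: str, is_stream_output: bool) -> str:
    media = {m.strip().split(";")[0] for m in accept.split(",") if m.strip()}
    if is_stream_output and "text/event-stream" in media:
        return "text/event-stream"
    if not media or media & {"application/json", "*/*"}:
        return "application/json"
    raise ValueError("406 Not Acceptable")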
@pytest.mark.usefixtures("serving_client_image_python_flow", "recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_image_flow(serving_client_image_python_flow, sample_image):
response = serving_client_image_python_flow.post("/score", data=json.dumps({"image": sample_image}))
assert (
response.status_code == 200
), f"Response code indicates error {response.status_code} - {response.data.decode()}"
response = json.loads(response.data.decode())
assert {"output"} == response.keys()
key_regex = re.compile(r"data:image/(.*);base64")
assert re.match(key_regex, list(response["output"].keys())[0])
@pytest.mark.usefixtures("serving_client_composite_image_flow", "recording_injection", "setup_local_connection")
@pytest.mark.e2etest
def test_list_image_flow(serving_client_composite_image_flow, sample_image):
image_dict = {"data:image/jpg;base64": sample_image}
response = serving_client_composite_image_flow.post(
"/score", data=json.dumps({"image_list": [image_dict], "image_dict": {"my_image": image_dict}})
)
assert (
response.status_code == 200
), f"Response code indicates error {response.status_code} - {response.data.decode()}"
response = json.loads(response.data.decode())
assert {"output"} == response.keys()
assert (
"data:image/jpg;base64" in response["output"][0]
), f"data:image/jpg;base64 not in output list {response['output']}"
@pytest.mark.usefixtures("serving_client_with_environment_variables")
@pytest.mark.e2etest
def test_flow_with_environment_variables(serving_client_with_environment_variables):
    expected_environment_variables = {
"env1": "2",
"env2": "runtime_env2",
"env3": "[1, 2, 3, 4, 5]",
"env4": '{"a": 1, "b": "2"}',
"env10": "aaaaa",
}
    for key, value in expected_environment_variables.items():
response = serving_client_with_environment_variables.post("/score", data=json.dumps({"key": key}))
assert (
response.status_code == 200
), f"Response code indicates error {response.status_code} - {response.data.decode()}"
response = json.loads(response.data.decode())
assert {"output"} == response.keys()
assert response["output"] == value
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_orm.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import datetime
import json
import uuid
import pytest
from promptflow._sdk._constants import ListViewType, RunStatus, RunTypes
from promptflow._sdk._errors import RunNotFoundError
from promptflow._sdk._orm import RunInfo
@pytest.fixture()
def run_name() -> str:
name = str(uuid.uuid4())
run_info = RunInfo(
name=name,
type=RunTypes.BATCH,
created_on=datetime.datetime.now().isoformat(),
status=RunStatus.NOT_STARTED,
display_name=name,
description="",
tags=None,
properties=json.dumps({}),
)
run_info.dump()
return name
@pytest.mark.sdk_test
@pytest.mark.e2etest
class TestRunInfo:
def test_get(self, run_name: str) -> None:
run_info = RunInfo.get(run_name)
assert run_info.name == run_name
assert run_info.type == RunTypes.BATCH
assert run_info.status == RunStatus.NOT_STARTED
assert run_info.display_name == run_name
assert run_info.description == ""
assert run_info.tags is None
assert run_info.properties == json.dumps({})
def test_get_not_exist(self) -> None:
not_exist_name = str(uuid.uuid4())
with pytest.raises(RunNotFoundError) as excinfo:
RunInfo.get(not_exist_name)
assert f"Run name {not_exist_name!r} cannot be found." in str(excinfo.value)
def test_list_order_by_created_time_desc(self) -> None:
for _ in range(3):
RunInfo(
name=str(uuid.uuid4()),
created_on=datetime.datetime.now().isoformat(),
status=RunStatus.NOT_STARTED,
description="",
tags=None,
properties=json.dumps({}),
).dump()
runs = RunInfo.list(max_results=3, list_view_type=ListViewType.ALL)
        # in a rare edge case the created_on values can be identical, so use ">=" here
assert runs[0].created_on >= runs[1].created_on >= runs[2].created_on
def test_archive(self, run_name: str) -> None:
run_info = RunInfo.get(run_name)
assert run_info.archived is False
run_info.archive()
# in-memory archived flag
assert run_info.archived is True
# db archived flag
assert RunInfo.get(run_name).archived is True
def test_restore(self, run_name: str) -> None:
run_info = RunInfo.get(run_name)
run_info.archive()
run_info = RunInfo.get(run_name)
assert run_info.archived is True
run_info.restore()
# in-memory archived flag
assert run_info.archived is False
# db archived flag
assert RunInfo.get(run_name).archived is False
def test_update(self, run_name: str) -> None:
run_info = RunInfo.get(run_name)
assert run_info.status == RunStatus.NOT_STARTED
assert run_info.display_name == run_name
assert run_info.description == ""
assert run_info.tags is None
updated_status = RunStatus.COMPLETED
updated_display_name = f"updated_{run_name}"
updated_description = "updated_description"
updated_tags = [{"key1": "value1", "key2": "value2"}]
run_info.update(
status=updated_status,
display_name=updated_display_name,
description=updated_description,
tags=updated_tags,
)
# in-memory status, display_name, description and tags
assert run_info.status == updated_status
assert run_info.display_name == updated_display_name
assert run_info.description == updated_description
assert run_info.tags == json.dumps(updated_tags)
# db status, display_name, description and tags
run_info = RunInfo.get(run_name)
assert run_info.status == updated_status
assert run_info.display_name == updated_display_name
assert run_info.description == updated_description
assert run_info.tags == json.dumps(updated_tags)
def test_null_type_and_display_name(self) -> None:
# test run_info table schema change:
        # 1. type can be null (we will deprecate this concept in the future)
        # 2. display_name can be null as the default value
name = str(uuid.uuid4())
run_info = RunInfo(
name=name,
created_on=datetime.datetime.now().isoformat(),
status=RunStatus.NOT_STARTED,
description="",
tags=None,
properties=json.dumps({}),
)
run_info.dump()
run_info_from_db = RunInfo.get(name)
assert run_info_from_db.type is None
assert run_info_from_db.display_name is None
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_flow_local_operations.py
import copy
import os.path
import re
import shutil
import tempfile
from pathlib import Path
import mock
import pytest
from promptflow._sdk._constants import FLOW_TOOLS_JSON, NODE_VARIANTS, PROMPT_FLOW_DIR_NAME, USE_VARIANTS
from promptflow._utils.yaml_utils import load_yaml
from promptflow.connections import AzureOpenAIConnection
PROMPTFLOW_ROOT = Path(__file__) / "../../../.."
TEST_ROOT = Path(__file__).parent.parent.parent
MODEL_ROOT = TEST_ROOT / "test_configs/e2e_samples"
CONNECTION_FILE = (PROMPTFLOW_ROOT / "connections.json").resolve().absolute().as_posix()
FLOWS_DIR = "./tests/test_configs/flows"
DATAS_DIR = "./tests/test_configs/datas"
def e2e_test_docker_build_and_run(output_path):
"""Build and run the docker image locally.
    This function is for ad-hoc local testing and needs to run on a dev machine with docker installed.
"""
import subprocess
subprocess.check_output(["docker", "build", ".", "-t", "test"], cwd=output_path)
subprocess.check_output(["docker", "tag", "test", "elliotz/promptflow-export-result:latest"], cwd=output_path)
subprocess.check_output(
[
"docker",
"run",
"-e",
"CUSTOM_CONNECTION_AZURE_OPENAI_API_KEY='xxx'" "elliotz/promptflow-export-result:latest",
],
cwd=output_path,
)
@pytest.fixture
def setup_connections(azure_open_ai_connection: AzureOpenAIConnection):
_ = {
"azure_open_ai_connection": azure_open_ai_connection,
}
from promptflow._sdk._pf_client import PFClient
from promptflow._sdk.entities._connection import _Connection
_client = PFClient()
_client.connections.create_or_update(
_Connection._load(
data={
"name": "custom_connection",
"type": "custom",
"configs": {
"CHAT_DEPLOYMENT_NAME": "gpt-35-turbo",
"AZURE_OPENAI_API_BASE": azure_open_ai_connection.api_base,
},
"secrets": {
"AZURE_OPENAI_API_KEY": azure_open_ai_connection.api_key,
},
}
)
)
_client.connections.create_or_update(
_Connection._load(
data={
"name": "azure_open_ai_connection",
"type": "azure_open_ai",
"api_type": azure_open_ai_connection.api_type,
"api_base": azure_open_ai_connection.api_base,
"api_version": azure_open_ai_connection.api_version,
"api_key": azure_open_ai_connection.api_key,
}
)
)
@pytest.mark.usefixtures("use_secrets_config_file", "setup_connections")
@pytest.mark.sdk_test
@pytest.mark.e2etest
class TestFlowLocalOperations:
def test_flow_build_as_docker(self, pf) -> None:
source = f"{FLOWS_DIR}/intent-copilot"
output_path = f"{FLOWS_DIR}/export/linux"
shutil.rmtree(output_path, ignore_errors=True)
(Path(source) / ".runs").mkdir(exist_ok=True)
(Path(source) / ".runs" / "dummy_run_file").touch()
with mock.patch("promptflow._sdk.operations._flow_operations.generate_random_string") as mock_random_string:
mock_random_string.return_value = "dummy1"
pf.flows.build(
flow=source,
output=output_path,
format="docker",
)
assert mock_random_string.call_count == 1
# check if .amlignore works
assert os.path.isdir(f"{source}/data")
assert not (Path(output_path) / "flow" / "data").exists()
# check if .runs is ignored by default
assert os.path.isfile(f"{source}/.runs/dummy_run_file")
assert not (Path(output_path) / "flow" / ".runs" / "dummy_run_file").exists()
# e2e_test_docker_build_and_run(output_path)
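    # A minimal sketch (an assumption for illustration, not the promptflow
    # implementation) of the ignore-style filtering checked above: paths whose
    # components match an ignore pattern are skipped when copying the flow into
    # the build output. The pattern names here are taken from the assertions above.
    @staticmethod
    def _is_ignored_sketch(relative_path: str, patterns=(".runs", "data")) -> bool:
        import fnmatch
        return any(
            fnmatch.fnmatch(part, pattern)
            for part in Path(relative_path).parts
            for pattern in patterns
        )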
def test_flow_build_as_docker_with_additional_includes(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_with_additional_include"
with tempfile.TemporaryDirectory() as temp_dir:
pf.flows.build(
flow=source,
output=temp_dir,
format="docker",
)
for additional_include in [
"../external_files/convert_to_dict.py",
"../external_files/fetch_text_content_from_url.py",
"../external_files/summarize_text_content.jinja2",
]:
additional_include_path = Path(source, additional_include)
target_path = Path(temp_dir, "flow", additional_include_path.name)
assert target_path.is_file()
assert target_path.read_text() == additional_include_path.read_text()
def test_flow_build_flow_only(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_with_additional_include"
with tempfile.TemporaryDirectory() as temp_dir:
pf.flows.build(
flow=source,
output=temp_dir,
format="docker",
flow_only=True,
)
for additional_include in [
"../external_files/convert_to_dict.py",
"../external_files/fetch_text_content_from_url.py",
"../external_files/summarize_text_content.jinja2",
]:
additional_include_path = Path(source, additional_include)
target_path = Path(temp_dir, additional_include_path.name)
assert target_path.is_file()
assert target_path.read_text() == additional_include_path.read_text()
assert Path(temp_dir, PROMPT_FLOW_DIR_NAME, FLOW_TOOLS_JSON).is_file()
with open(Path(temp_dir, "flow.dag.yaml"), "r", encoding="utf-8") as f:
flow_dag_content = load_yaml(f)
assert NODE_VARIANTS not in flow_dag_content
assert "additional_includes" not in flow_dag_content
assert not any([USE_VARIANTS in node for node in flow_dag_content["nodes"]])
def test_flow_build_as_docker_with_variant(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_with_additional_include"
flow_dag_path = Path(source, "flow.dag.yaml")
flow_dag = load_yaml(flow_dag_path)
with tempfile.TemporaryDirectory() as temp_dir:
pf.flows.build(
flow=source,
output=temp_dir,
format="docker",
variant="${summarize_text_content.variant_0}",
)
new_flow_dag_path = Path(temp_dir, "flow", "flow.dag.yaml")
new_flow_dag = load_yaml(new_flow_dag_path)
target_node = next(filter(lambda x: x["name"] == "summarize_text_content", new_flow_dag["nodes"]))
target_node.pop("name")
assert target_node == flow_dag["node_variants"]["summarize_text_content"]["variants"]["variant_0"]["node"]
def test_flow_build_generate_flow_tools_json(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_with_additional_include"
with tempfile.TemporaryDirectory() as temp_dir:
pf.flows.build(
flow=source,
output=temp_dir,
variant="${summarize_text_content.variant_0}",
)
flow_tools_path = Path(temp_dir) / "flow" / PROMPT_FLOW_DIR_NAME / FLOW_TOOLS_JSON
assert flow_tools_path.is_file()
            # the "package" section in flow.tools.json is not determined by the flow, so we don't check it here
assert load_yaml(flow_tools_path)["code"] == {
"classify_with_llm.jinja2": {
"inputs": {
"examples": {"type": ["string"]},
"text_content": {"type": ["string"]},
"url": {"type": ["string"]},
},
"source": "classify_with_llm.jinja2",
"type": "llm",
},
"convert_to_dict.py": {
"function": "convert_to_dict",
"inputs": {"input_str": {"type": ["string"]}},
"source": "convert_to_dict.py",
"type": "python",
},
"fetch_text_content_from_url.py": {
"function": "fetch_text_content_from_url",
"inputs": {"url": {"type": ["string"]}},
"source": "fetch_text_content_from_url.py",
"type": "python",
},
"prepare_examples.py": {
"function": "prepare_examples",
"source": "prepare_examples.py",
"type": "python",
},
"summarize_text_content.jinja2": {
"inputs": {"text": {"type": ["string"]}},
"source": "summarize_text_content.jinja2",
"type": "llm",
},
}
def test_flow_validate_generate_flow_tools_json(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_with_additional_include"
flow_tools_path = Path(source) / PROMPT_FLOW_DIR_NAME / FLOW_TOOLS_JSON
flow_tools_path.unlink(missing_ok=True)
validation_result = pf.flows.validate(flow=source)
assert validation_result.passed
assert flow_tools_path.is_file()
        # the "package" section in flow.tools.json is not determined by the flow, so we don't check it here
assert load_yaml(flow_tools_path)["code"] == {
"classify_with_llm.jinja2": {
"inputs": {
"examples": {"type": ["string"]},
"text_content": {"type": ["string"]},
"url": {"type": ["string"]},
},
"source": "classify_with_llm.jinja2",
"type": "llm",
},
"convert_to_dict.py": {
"function": "convert_to_dict",
"inputs": {"input_str": {"type": ["string"]}},
"source": os.path.join("..", "external_files", "convert_to_dict.py"),
"type": "python",
},
"fetch_text_content_from_url.py": {
"function": "fetch_text_content_from_url",
"inputs": {"url": {"type": ["string"]}},
"source": os.path.join("..", "external_files", "fetch_text_content_from_url.py"),
"type": "python",
},
"prepare_examples.py": {
"function": "prepare_examples",
"source": "prepare_examples.py",
"type": "python",
},
"summarize_text_content.jinja2": {
"inputs": {"text": {"type": ["string"]}},
"source": os.path.join("..", "external_files", "summarize_text_content.jinja2"),
"type": "llm",
},
"summarize_text_content__variant_1.jinja2": {
"inputs": {"text": {"type": ["string"]}},
"source": "summarize_text_content__variant_1.jinja2",
"type": "llm",
},
}
def test_flow_validation_failed(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_invalid"
flow_tools_path = Path(source) / PROMPT_FLOW_DIR_NAME / FLOW_TOOLS_JSON
flow_tools_path.unlink(missing_ok=True)
validation_result = pf.flows.validate(flow=source)
error_messages = copy.deepcopy(validation_result.error_messages)
assert "Failed to load python module from file" in error_messages.pop("nodes.2.source.path", "")
for yaml_path in [
"node_variants.summarize_text_content.variants.variant_0.node.source.path",
"nodes.1.source.path",
]:
assert re.search(r"Meta file '.*' can not be found.", error_messages.pop(yaml_path, ""))
assert error_messages == {
"inputs.url.type": "Missing data for required field.",
"outputs.category.type": "Missing data for required field.",
}
assert "line 22" in repr(validation_result)
assert flow_tools_path.is_file()
flow_tools = load_yaml(flow_tools_path)
assert "code" in flow_tools
assert flow_tools["code"] == {
"classify_with_llm.jinja2": {
"inputs": {
"examples": {"type": ["string"]},
"text_content": {"type": ["string"]},
"url": {"type": ["string"]},
},
"source": "classify_with_llm.jinja2",
"type": "prompt",
},
"./classify_with_llm.jinja2": {
"inputs": {
"examples": {"type": ["string"]},
"text_content": {"type": ["string"]},
"url": {"type": ["string"]},
},
"source": "./classify_with_llm.jinja2",
"type": "llm",
},
"convert_to_dict.py": {
"function": "convert_to_dict",
"inputs": {"input_str": {"type": ["string"]}},
"source": "convert_to_dict.py",
"type": "python",
},
"fetch_text_content_from_url.py": {
"function": "fetch_text_content_from_url",
"inputs": {"url": {"type": ["string"]}},
"source": os.path.join("..", "external_files", "fetch_text_content_from_url.py"),
"type": "python",
},
"summarize_text_content__variant_1.jinja2": {
"inputs": {"text": {"type": ["string"]}},
"source": "summarize_text_content__variant_1.jinja2",
"type": "llm",
},
}
def test_flow_generate_tools_meta(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_invalid"
tools_meta, tools_error = pf.flows._generate_tools_meta(source)
assert tools_meta["code"] == {
"classify_with_llm.jinja2": {
"inputs": {
"examples": {"type": ["string"]},
"text_content": {"type": ["string"]},
"url": {"type": ["string"]},
},
"source": "classify_with_llm.jinja2",
"type": "prompt",
},
"./classify_with_llm.jinja2": {
"inputs": {
"examples": {"type": ["string"]},
"text_content": {"type": ["string"]},
"url": {"type": ["string"]},
},
"source": "./classify_with_llm.jinja2",
"type": "llm",
},
"convert_to_dict.py": {
"function": "convert_to_dict",
"inputs": {"input_str": {"type": ["string"]}},
"source": "convert_to_dict.py",
"type": "python",
},
"fetch_text_content_from_url.py": {
"function": "fetch_text_content_from_url",
"inputs": {"url": {"type": ["string"]}},
"source": os.path.join("..", "external_files", "fetch_text_content_from_url.py"),
"type": "python",
},
"summarize_text_content__variant_1.jinja2": {
"inputs": {"text": {"type": ["string"]}},
"source": "summarize_text_content__variant_1.jinja2",
"type": "llm",
},
}
        # promptflow-tools is not installed in CI
# assert list(tools_meta["package"]) == ["promptflow.tools.azure_translator.get_translation"]
assert "Failed to load python module from file" in tools_error.pop("prepare_examples.py", "")
assert re.search(r"Meta file '.*' can not be found.", tools_error.pop("summarize_text_content.jinja2", ""))
assert tools_error == {}
tools_meta, tools_error = pf.flows._generate_tools_meta(source, source_name="summarize_text_content.jinja2")
assert tools_meta == {"code": {}, "package": {}}
assert re.search(r"Meta file '.*' can not be found.", tools_error.pop("summarize_text_content.jinja2", ""))
assert tools_error == {}
tools_meta, tools_error = pf.flows._generate_tools_meta(source, source_name="fetch_text_content_from_url.py")
assert tools_meta == {
"code": {
"fetch_text_content_from_url.py": {
"function": "fetch_text_content_from_url",
"inputs": {"url": {"type": ["string"]}},
"source": os.path.join("..", "external_files", "fetch_text_content_from_url.py"),
"type": "python",
},
},
"package": {},
}
assert tools_error == {}
@pytest.mark.skip(reason="It will fail in CI for some reasons. Still need to investigate.")
def test_flow_generate_tools_meta_timeout(self, pf) -> None:
source = f"{FLOWS_DIR}/web_classification_invalid"
for tools_meta, tools_error in [
pf.flows._generate_tools_meta(source, timeout=1),
# There is no built-in method to forcefully stop a running thread in Python
# because abruptly stopping a thread can cause issues like resource leaks,
# deadlocks, or inconsistent states.
# Caller (VSCode extension) will handle the timeout error.
# pf.flows._generate_tools_meta(source, source_name="convert_to_dict.py", timeout=1),
]:
assert tools_meta == {"code": {}, "package": {}}
assert tools_error
for error in tools_error.values():
assert "timeout" in error
def test_flow_generate_tools_meta_with_pkg_tool_with_custom_strong_type_connection(self, pf) -> None:
source = f"{FLOWS_DIR}/flow_with_package_tool_with_custom_strong_type_connection"
tools_meta, tools_error = pf.flows._generate_tools_meta(source)
assert tools_error == {}
assert tools_meta["code"] == {}
assert tools_meta["package"] == {
"my_tool_package.tools.my_tool_1.my_tool": {
"function": "my_tool",
"inputs": {
"connection": {
"type": ["CustomConnection"],
"custom_type": ["MyFirstConnection", "MySecondConnection"],
},
"input_text": {"type": ["string"]},
},
"module": "my_tool_package.tools.my_tool_1",
"name": "My First Tool",
"description": "This is my first tool",
"type": "python",
"package": "test-custom-tools",
"package_version": "0.0.2",
},
"my_tool_package.tools.my_tool_2.MyTool.my_tool": {
"class_name": "MyTool",
"function": "my_tool",
"inputs": {
"connection": {"type": ["CustomConnection"], "custom_type": ["MySecondConnection"]},
"input_text": {"type": ["string"]},
},
"module": "my_tool_package.tools.my_tool_2",
"name": "My Second Tool",
"description": "This is my second tool",
"type": "python",
"package": "test-custom-tools",
"package_version": "0.0.2",
},
}
def test_flow_generate_tools_meta_with_script_tool_with_custom_strong_type_connection(self, pf) -> None:
source = f"{FLOWS_DIR}/flow_with_script_tool_with_custom_strong_type_connection"
tools_meta, tools_error = pf.flows._generate_tools_meta(source)
assert tools_error == {}
assert tools_meta["package"] == {}
assert tools_meta["code"] == {
"my_script_tool.py": {
"function": "my_tool",
"inputs": {"connection": {"type": ["CustomConnection"]}, "input_param": {"type": ["string"]}},
"source": "my_script_tool.py",
"type": "python",
}
}
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/e2etests/test_executable.py
import subprocess
import sys
import tempfile
from pathlib import Path
import mock
import pytest
from .test_cli import run_pf_command
FLOWS_DIR = "./tests/test_configs/flows"
RUNS_DIR = "./tests/test_configs/runs"
CONNECTIONS_DIR = "./tests/test_configs/connections"
DATAS_DIR = "./tests/test_configs/datas"
@pytest.mark.usefixtures("use_secrets_config_file", "setup_local_connection", "install_custom_tool_pkg")
@pytest.mark.cli_test
@pytest.mark.e2etest
class TestExecutable:
@pytest.mark.skipif(
sys.platform == "win32" or sys.platform == "darwin",
reason="Raise Exception: Process terminated with exit code 4294967295",
)
def test_flow_build_executable(self):
source = f"{FLOWS_DIR}/web_classification/flow.dag.yaml"
target = "promptflow._sdk.operations._flow_operations.FlowOperations._run_pyinstaller"
with mock.patch(target) as mocked:
mocked.return_value = None
with tempfile.TemporaryDirectory() as temp_dir:
run_pf_command(
"flow",
"build",
"--source",
source,
"--output",
temp_dir,
"--format",
"executable",
)
# Start the Python script as a subprocess
app_file = Path(temp_dir, "app.py").as_posix()
process = subprocess.Popen(["python", app_file], stderr=subprocess.PIPE)
try:
# Wait for a specified time (in seconds)
wait_time = 5
process.wait(timeout=wait_time)
if process.returncode == 0:
pass
else:
raise Exception(
f"Process terminated with exit code {process.returncode}, "
f"{process.stderr.read().decode('utf-8')}"
)
except (subprocess.TimeoutExpired, KeyboardInterrupt):
pass
finally:
# Kill the process
process.terminate()
process.wait() # Ensure the process is fully terminated
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_flow.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from pathlib import Path
import pytest
from marshmallow import ValidationError
from promptflow import load_flow
from promptflow._sdk.entities._eager_flow import EagerFlow
from promptflow._sdk.entities._flow import ProtectedFlow
from promptflow.exceptions import UserErrorException
FLOWS_DIR = Path("./tests/test_configs/flows")
EAGER_FLOWS_DIR = Path("./tests/test_configs/eager_flows")
@pytest.mark.sdk_test
@pytest.mark.unittest
class TestRun:
@pytest.mark.parametrize(
"kwargs",
[
{"source": EAGER_FLOWS_DIR / "simple_with_yaml"},
{"source": EAGER_FLOWS_DIR / "simple_with_yaml" / "flow.dag.yaml"},
{"source": EAGER_FLOWS_DIR / "simple_without_yaml" / "entry.py", "entry": "my_flow"},
{"source": EAGER_FLOWS_DIR / "multiple_entries" / "entry1.py", "entry": "my_flow1"},
{"source": EAGER_FLOWS_DIR / "multiple_entries" / "entry1.py", "entry": "my_flow2"},
],
)
def test_eager_flow_load(self, kwargs):
flow = load_flow(**kwargs)
assert isinstance(flow, EagerFlow)
@pytest.mark.parametrize(
"kwargs",
[
{"source": FLOWS_DIR / "print_input_flow"},
{"source": FLOWS_DIR / "print_input_flow" / "flow.dag.yaml"},
],
)
def test_dag_flow_load(self, kwargs):
flow = load_flow(**kwargs)
assert isinstance(flow, ProtectedFlow)
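    # A minimal sketch (an assumption about the dispatch rule, not promptflow's
    # load_flow) of what the two parametrized tests above exercise: a .py source
    # implies an eager flow; for YAML sources, the presence of an "entry" field decides.
    @staticmethod
    def _flow_kind_sketch(source: Path) -> str:
        from promptflow._utils.yaml_utils import load_yaml  # import path used elsewhere in these tests
        if source.suffix == ".py":
            return "eager"
        dag_path = source / "flow.dag.yaml" if source.is_dir() else source
        return "eager" if "entry" in load_yaml(dag_path) else "dag"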
def test_flow_load_advanced(self):
flow = load_flow(source=EAGER_FLOWS_DIR / "flow_with_environment")
assert isinstance(flow, EagerFlow)
assert flow._data["environment"] == {"python_requirements_txt": "requirements.txt"}
@pytest.mark.parametrize(
"kwargs, error_message, exception_type",
[
(
{
"source": EAGER_FLOWS_DIR / "multiple_entries" / "entry1.py",
},
"Entry function is not specified",
UserErrorException,
),
(
{
"source": EAGER_FLOWS_DIR / "multiple_entries" / "not_exist.py",
},
"does not exist",
UserErrorException,
),
(
{
"source": EAGER_FLOWS_DIR / "invalid_no_path",
},
"{'path': ['Missing data for required field.']}",
ValidationError,
),
(
{
"source": EAGER_FLOWS_DIR / "invalid_illegal_path",
},
"Can't find directory or file in resolved absolute path:",
ValidationError,
),
(
{"source": EAGER_FLOWS_DIR / "invalid_extra_fields_nodes"},
"{'nodes': ['Unknown field.']}",
ValidationError,
),
],
)
def test_flow_load_invalid(self, kwargs, error_message, exception_type):
with pytest.raises(exception_type) as e:
load_flow(**kwargs)
assert error_message in str(e.value)
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_config.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from pathlib import Path
import pytest
from promptflow._sdk._configuration import Configuration, InvalidConfigValue
from promptflow._sdk._constants import FLOW_DIRECTORY_MACRO_IN_CONFIG
from promptflow._sdk._utils import ClientUserAgentUtil
CONFIG_DATA_ROOT = Path(__file__).parent.parent.parent / "test_configs" / "configs"
@pytest.fixture
def config():
return Configuration.get_instance()
@pytest.mark.unittest
class TestConfig:
def test_set_config(self, config):
config.set_config("a.b.c.test_key", "test_value")
assert config.get_config("a.b.c.test_key") == "test_value"
# global config may contain other keys
assert config.config["a"] == {"b": {"c": {"test_key": "test_value"}}}
def test_get_config(self, config):
config.set_config("test_key", "test_value")
assert config.get_config("test_key") == "test_value"
def test_get_or_set_installation_id(self, config):
user_id = config.get_or_set_installation_id()
assert user_id is not None
def test_config_instance(self, config):
new_config = Configuration.get_instance()
assert new_config is config
def test_set_invalid_run_output_path(self, config: Configuration) -> None:
expected_error_message = (
"Cannot specify flow directory as run output path; "
"if you want to specify run output path under flow directory, "
"please use its child folder, e.g. '${flow_directory}/.runs'."
)
# directly set
with pytest.raises(InvalidConfigValue) as e:
config.set_config(key=Configuration.RUN_OUTPUT_PATH, value=FLOW_DIRECTORY_MACRO_IN_CONFIG)
assert expected_error_message in str(e)
# override
with pytest.raises(InvalidConfigValue) as e:
Configuration(overrides={Configuration.RUN_OUTPUT_PATH: FLOW_DIRECTORY_MACRO_IN_CONFIG})
assert expected_error_message in str(e)
def test_ua_set_load(self, config: Configuration) -> None:
config.set_config(key=Configuration.USER_AGENT, value="test/1.0.0")
user_agent = config.get_user_agent()
assert user_agent == "PFCustomer_test/1.0.0"
# load empty ua won't break
config.set_config(key=Configuration.USER_AGENT, value="")
user_agent = config.get_user_agent()
assert user_agent == ""
        # an empty ua won't be added to the context
        ClientUserAgentUtil.update_user_agent_from_config()
        user_agent = ClientUserAgentUtil.get_user_agent()
        # in the test environment, the user agent may contain "promptflow-local-serving/0.0.1 test-user-agent"
assert "test/1.0.0" not in user_agent
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_connection.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from pathlib import Path
from unittest.mock import patch
import pytest
from promptflow._cli._pf._connection import validate_and_interactive_get_secrets
from promptflow._sdk._constants import SCRUBBED_VALUE, CustomStrongTypeConnectionConfigs
from promptflow._sdk._load_functions import _load_env_to_connection
from promptflow._sdk.entities._connection import (
AzureContentSafetyConnection,
AzureOpenAIConnection,
CognitiveSearchConnection,
CustomConnection,
FormRecognizerConnection,
OpenAIConnection,
QdrantConnection,
SerpConnection,
WeaviateConnection,
_Connection,
)
from promptflow._utils.yaml_utils import load_yaml
from promptflow.exceptions import UserErrorException
TEST_ROOT = Path(__file__).parent.parent.parent
CONNECTION_ROOT = TEST_ROOT / "test_configs/connections"
@pytest.mark.unittest
class TestConnection:
@pytest.mark.parametrize(
"file_name, class_name, init_param, expected",
[
(
"azure_openai_connection.yaml",
AzureOpenAIConnection,
{
"name": "my_azure_open_ai_connection",
"api_type": "azure",
"api_version": "2023-07-01-preview",
"api_key": "<to-be-replaced>",
"api_base": "aoai-api-endpoint",
},
{
"module": "promptflow.connections",
"type": "azure_open_ai",
},
),
(
"openai_connection.yaml",
OpenAIConnection,
{
"name": "my_open_ai_connection",
"api_key": "<to-be-replaced>",
"organization": "org",
},
{
"module": "promptflow.connections",
"type": "open_ai",
},
),
(
"openai_connection_base_url.yaml",
OpenAIConnection,
{
"name": "my_open_ai_connection",
"api_key": "<to-be-replaced>",
"organization": "org",
"base_url": "custom_base_url",
},
{
"module": "promptflow.connections",
"type": "open_ai",
},
),
(
"custom_connection.yaml",
CustomConnection,
{
"name": "my_custom_connection",
"configs": {"key1": "test1"},
"secrets": {"key2": "test2"},
},
{
"module": "promptflow.connections",
"type": "custom",
},
),
(
"azure_content_safety_connection.yaml",
AzureContentSafetyConnection,
{
"name": "my_azure_content_safety_connection",
"api_key": "<to-be-replaced>",
"endpoint": "endpoint",
"api_version": "2023-04-30-preview",
"api_type": "Content Safety",
},
{
"module": "promptflow.connections",
"type": "azure_content_safety",
},
),
(
"cognitive_search_connection.yaml",
CognitiveSearchConnection,
{
"name": "my_cognitive_search_connection",
"api_key": "<to-be-replaced>",
"api_base": "endpoint",
"api_version": "2023-07-01-Preview",
},
{
"module": "promptflow.connections",
"type": "cognitive_search",
},
),
(
"serp_connection.yaml",
SerpConnection,
{
"name": "my_serp_connection",
"api_key": "<to-be-replaced>",
},
{
"module": "promptflow.connections",
"type": "serp",
},
),
(
"form_recognizer_connection.yaml",
FormRecognizerConnection,
{
"name": "my_form_recognizer_connection",
"api_key": "<to-be-replaced>",
"endpoint": "endpoint",
"api_version": "2023-07-31",
"api_type": "Form Recognizer",
},
{
"module": "promptflow.connections",
"type": "form_recognizer",
},
),
(
"qdrant_connection.yaml",
QdrantConnection,
{
"name": "my_qdrant_connection",
"api_key": "<to-be-replaced>",
"api_base": "endpoint",
},
{
"module": "promptflow_vectordb.connections",
"type": "qdrant",
},
),
(
"weaviate_connection.yaml",
WeaviateConnection,
{
"name": "my_weaviate_connection",
"api_key": "<to-be-replaced>",
"api_base": "endpoint",
},
{
"module": "promptflow_vectordb.connections",
"type": "weaviate",
},
),
],
)
def test_connection_load_dump(self, file_name, class_name, init_param, expected):
conn = _Connection._load(data=load_yaml(CONNECTION_ROOT / file_name))
expected = {**expected, **init_param}
assert dict(conn._to_dict()) == expected
assert class_name(**init_param)._to_dict() == expected
def test_connection_load_from_env(self):
connection = _load_env_to_connection(source=CONNECTION_ROOT / ".env", params_override=[{"name": "env_conn"}])
assert connection._to_dict() == {
"name": "env_conn",
"module": "promptflow.connections",
"type": "custom",
"configs": {},
"secrets": {"aaa": "bbb", "ccc": "ddd"},
}
assert (
connection.__str__()
== """name: env_conn
module: promptflow.connections
type: custom
configs: {}
secrets:
aaa: bbb
ccc: ddd
"""
)
def test_connection_load_from_env_file_bad_case(self):
# Test file not found
with pytest.raises(FileNotFoundError) as e:
_load_env_to_connection(source=CONNECTION_ROOT / "mock.env", params_override=[{"name": "env_conn"}])
assert "not found" in str(e.value)
# Test file empty
with pytest.raises(Exception) as e:
_load_env_to_connection(source=CONNECTION_ROOT / "empty.env", params_override=[{"name": "env_conn"}])
assert "Load nothing" in str(e.value)
def test_to_execution_connection_dict(self):
# Assert custom connection build
connection = CustomConnection(name="test_connection", configs={"a": "1"}, secrets={"b": "2"})
assert connection._to_execution_connection_dict() == {
"module": "promptflow.connections",
"secret_keys": ["b"],
"type": "CustomConnection",
"value": {"a": "1", "b": "2"},
}
# Assert strong type - AzureOpenAI
connection = AzureOpenAIConnection(
name="test_connection_1",
type="AzureOpenAI",
api_key="test_key",
api_base="test_base",
api_type="azure",
api_version="2023-07-01-preview",
)
assert connection._to_execution_connection_dict() == {
"module": "promptflow.connections",
"secret_keys": ["api_key"],
"type": "AzureOpenAIConnection",
"value": {
"api_base": "test_base",
"api_key": "test_key",
"api_type": "azure",
"api_version": "2023-07-01-preview",
},
}
# Assert strong type - OpenAI
connection = OpenAIConnection(
name="test_connection_1",
type="AzureOpenAI",
api_key="test_key",
organization="test_org",
)
assert connection._to_execution_connection_dict() == {
"module": "promptflow.connections",
"secret_keys": ["api_key"],
"type": "OpenAIConnection",
"value": {"api_key": "test_key", "organization": "test_org"},
}
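    # A minimal sketch (an assumption, not the promptflow implementation) of the
    # custom-connection flattening asserted above: configs and secrets merge into
    # "value", while secret key names are listed separately.
    @staticmethod
    def _to_execution_dict_sketch(configs: dict, secrets: dict) -> dict:
        return {
            "module": "promptflow.connections",
            "secret_keys": list(secrets),
            "type": "CustomConnection",
            "value": {**configs, **secrets},
        }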
def test_validate_and_interactive_get_secrets(self):
# Path 1: Create
connection = CustomConnection(
name="test_connection",
secrets={"key1": SCRUBBED_VALUE, "key2": "", "key3": "<no-change>", "key4": "<user-input>", "key5": "**"},
)
with patch("promptflow._cli._pf._connection.get_secret_input", new=lambda prompt: "test_value"):
validate_and_interactive_get_secrets(connection, is_update=False)
assert connection.secrets == {
"key1": "test_value",
"key2": "test_value",
"key3": "test_value",
"key4": "test_value",
"key5": "test_value",
}
# Path 2: Update
# Scrubbed value will be filled in _validate_and_encrypt_secrets for update, so no changes here.
connection = CustomConnection(
name="test_connection",
secrets={"key1": SCRUBBED_VALUE, "key2": "", "key3": "<no-change>", "key4": "<user-input>", "key5": "**"},
)
with patch("promptflow._cli._pf._connection.get_secret_input", new=lambda prompt: "test_value"):
validate_and_interactive_get_secrets(connection, is_update=True)
assert connection.secrets == {
"key1": SCRUBBED_VALUE,
"key2": "",
"key3": "<no-change>",
"key4": "test_value",
"key5": "**",
}
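    # A minimal sketch (an assumption about the rule, not the promptflow
    # implementation) of which secret values the two paths above re-prompt for:
    # on create, any placeholder-like value; on update, only "<user-input>".
    @staticmethod
    def _needs_secret_input_sketch(value: str, is_update: bool) -> bool:
        placeholders = {SCRUBBED_VALUE, "", "<no-change>", "<user-input>"}
        if not is_update:
            return value in placeholders or (bool(value) and set(value) == {"*"})
        return value == "<user-input>"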
def test_validate_and_encrypt_secrets(self):
# Path 1: Create
connection = CustomConnection(
name="test_connection",
secrets={"key1": SCRUBBED_VALUE, "key2": "", "key3": "<no-change>", "key4": "<user-input>", "key5": "**"},
)
with pytest.raises(Exception) as e:
connection._validate_and_encrypt_secrets()
assert "secrets ['key1', 'key2', 'key3', 'key4', 'key5'] value invalid, please fill them" in str(e.value)
# Path 2: Update
connection._secrets = {"key1": "val1", "key2": "val2", "key4": "val4", "key5": "*"}
        # raise error for key3 as the original value is missing.
        # raise error for key5 as the original value is still scrubbed.
        # raise error for key4 even though it was in _secrets, because it requires <user-input>.
with pytest.raises(Exception) as e:
connection._validate_and_encrypt_secrets()
assert "secrets ['key3', 'key4', 'key5'] value invalid, please fill them" in str(e.value)
def test_convert_to_custom_strong_type(self, install_custom_tool_pkg):
module_name = "my_tool_package.tools.my_tool_2"
custom_conn_type = "MyFirstConnection"
import importlib
module = importlib.import_module(module_name)
# Connection created by custom strong type connection template for package tool
connection = CustomConnection(
name="test_connection",
configs={
"a": "1",
CustomStrongTypeConnectionConfigs.PROMPTFLOW_MODULE_KEY: module_name,
CustomStrongTypeConnectionConfigs.PROMPTFLOW_TYPE_KEY: custom_conn_type,
},
secrets={"b": "2"},
)
res = connection._convert_to_custom_strong_type()
assert isinstance(res, module.MyFirstConnection)
assert res.secrets == {"b": "2"}
# Connection created by custom connection template for script tool
connection = CustomConnection(name="test_connection", configs={"a": "1"}, secrets={"b": "2"})
res = connection._convert_to_custom_strong_type(module=module, to_class=custom_conn_type)
assert isinstance(res, module.MyFirstConnection)
assert res.configs == {"a": "1"}
# Connection created with custom connection type in portal for package tool
connection._convert_to_custom_strong_type(module=module_name, to_class=custom_conn_type)
assert isinstance(res, module.MyFirstConnection)
assert res.configs == {"a": "1"}
# Invalid module
module_name = "not_existing_module"
with pytest.raises(ModuleNotFoundError, match=r".*No module named 'not_existing_module'*"):
connection._convert_to_custom_strong_type(module=module_name, to_class=custom_conn_type)
module_name = None
with pytest.raises(
UserErrorException,
match=r".*Failed to convert to custom strong type connection because of invalid module or class*",
):
connection._convert_to_custom_strong_type(module=module_name, to_class=custom_conn_type)
custom_conn_type = None
with pytest.raises(
UserErrorException,
match=r".*Failed to convert to custom strong type connection because of invalid module or class*",
):
connection._convert_to_custom_strong_type(module=module_name, to_class=custom_conn_type)
# File: promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_run.py
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import copy
import uuid
from pathlib import Path
from unittest.mock import patch
import pytest
from marshmallow import ValidationError
from promptflow._sdk._constants import BASE_PATH_CONTEXT_KEY, NODES
from promptflow._sdk._errors import InvalidFlowError
from promptflow._sdk._load_functions import load_run
from promptflow._sdk._pf_client import PFClient
from promptflow._sdk._run_functions import create_yaml_run
from promptflow._sdk._submitter import RunSubmitter, overwrite_variant, variant_overwrite_context
from promptflow._sdk.entities import Run
from promptflow._sdk.operations._local_storage_operations import LocalStorageOperations
from promptflow._utils.yaml_utils import load_yaml
PROMOTFLOW_ROOT = Path(__file__) / "../../../.."
FLOWS_DIR = Path("./tests/test_configs/flows")
RUNS_DIR = Path("./tests/test_configs/runs")
DATAS_DIR = Path("./tests/test_configs/datas")
@pytest.mark.sdk_test
@pytest.mark.unittest
class TestRun:
def test_overwrite_variant_context(self):
with variant_overwrite_context(
flow_path=FLOWS_DIR / "web_classification", tuning_node="summarize_text_content", variant="variant_0"
) as flow:
with open(flow.path) as f:
flow_dag = load_yaml(f)
node_name_2_node = {node["name"]: node for node in flow_dag[NODES]}
node = node_name_2_node["summarize_text_content"]
assert node["inputs"]["temperature"] == "0.2"
def test_overwrite_connections(self):
with variant_overwrite_context(
flow_path=FLOWS_DIR / "web_classification",
connections={"classify_with_llm": {"connection": "azure_open_ai", "deployment_name": "gpt-35-turbo"}},
) as flow:
with open(flow.path) as f:
flow_dag = load_yaml(f)
node_name_2_node = {node["name"]: node for node in flow_dag[NODES]}
node = node_name_2_node["classify_with_llm"]
assert node["connection"] == "azure_open_ai"
assert node["inputs"]["deployment_name"] == "gpt-35-turbo"
@pytest.mark.parametrize(
"connections, error_message",
[
(
{
"classify_with_llm": {
"connection": "azure_open_ai",
"deployment_name": "gpt-35-turbo",
"unsupported": 1,
}
},
"Unsupported llm connection overwrite keys",
),
("str", "Invalid connections overwrite format: str"),
({"not_exist": 1}, "Node not_exist not found in flow"),
({"classify_with_llm": 1}, "Invalid connection overwrite format: 1, only dict is supported."),
],
)
def test_overwrite_connections_invalid(self, connections, error_message):
with pytest.raises(InvalidFlowError) as e:
with variant_overwrite_context(
flow_path=FLOWS_DIR / "web_classification",
connections=connections,
):
pass
assert error_message in str(e.value)
def test_load_run(self):
input_dict = {
"data": (DATAS_DIR / "webClassification1.jsonl").resolve().as_posix(),
"column_mapping": {"context": "${data.context}"},
"flow": (FLOWS_DIR / "web_classification").resolve().as_posix(),
}
bulk_run = Run._load_from_dict(
data=input_dict, context={BASE_PATH_CONTEXT_KEY: FLOWS_DIR}, additional_message=""
)
assert isinstance(bulk_run, Run)
def test_dot_env_resolve(self):
run_id = str(uuid.uuid4())
source = f"{RUNS_DIR}/sample_bulk_run.yaml"
run = load_run(source=source, params_override=[{"name": run_id}])
assert run.environment_variables == {"FOO": "BAR"}
def test_run_invalid_flow_path(self):
run_id = str(uuid.uuid4())
source = f"{RUNS_DIR}/bulk_run_invalid_flow_path.yaml"
with pytest.raises(ValidationError) as e:
load_run(source=source, params_override=[{"name": run_id}])
assert "Can't find directory or file in resolved absolute path:" in str(e.value)
def test_run_invalid_remote_flow(self):
run_id = str(uuid.uuid4())
source = f"{RUNS_DIR}/bulk_run_invalid_remote_flow_str.yaml"
with pytest.raises(ValidationError) as e:
load_run(source=source, params_override=[{"name": run_id}])
assert "Invalid remote flow path. Currently only azureml:<flow-name> is supported" in str(e.value)
def test_data_not_exist_validation_error(self):
source = f"{RUNS_DIR}/sample_bulk_run.yaml"
with pytest.raises(ValidationError) as e:
load_run(source=source, params_override=[{"data": "not_exist"}])
assert "Can't find directory or file" in str(e.value)
assert "Invalid remote path." in str(e.value)
@pytest.mark.parametrize(
"source, error_msg",
[
(f"{RUNS_DIR}/illegal/non_exist_data.yaml", "Can't find directory or file"),
],
)
def test_invalid_yaml(self, source, error_msg):
with pytest.raises(ValidationError) as e:
create_yaml_run(source=source)
assert error_msg in str(e.value)
def test_run_bulk_invalid_params(self, pf):
# Test if function raises FileNotFoundError
with pytest.raises(FileNotFoundError):
pf.run(flow="invalid_path", data="fake_data")
with pytest.raises(FileNotFoundError):
pf.run(flow="invalid_path", data="fake_data", batch_run="fake_run")
def test_overwrite_variant(self):
flow_dag = {
"nodes": [
{
"name": "node1",
"use_variants": True,
"variant_id": "default",
"inputs": {
"param1": "value1",
"param2": "value2",
},
},
],
"node_variants": {
"node1": {
"default_variant_id": "variant1",
"variants": {
"variant1": {
"node": {
"inputs": {
"param1": "value1_variant1",
"param2": "value2_variant1",
},
},
},
},
},
},
}
# Test if function raises InvalidFlowError
with pytest.raises(InvalidFlowError):
overwrite_variant(flow_dag, "node3", "variant1")
with pytest.raises(InvalidFlowError):
overwrite_variant(flow_dag, "node1", "variant3")
# Test if function overwrites variant correctly
dag = copy.deepcopy(flow_dag)
overwrite_variant(dag, "node1", "variant1")
assert dag["nodes"][0]["inputs"]["param1"] == "value1_variant1"
assert dag["nodes"][0]["inputs"]["param2"] == "value2_variant1"
# test overwrite default variant
dag = copy.deepcopy(flow_dag)
overwrite_variant(dag)
assert dag["nodes"][0]["inputs"]["param1"] == "value1_variant1"
assert dag["nodes"][0]["inputs"]["param2"] == "value2_variant1"
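# A minimal sketch of the overwrite semantics pinned by the assertions above,
# assuming the DAG layout used in flow_dag (hypothetical helper, not the SDK's
# overwrite_variant):
def apply_variant(dag: dict, node_name: str, variant_id: str = None) -> None:
    node_variants = dag["node_variants"][node_name]  # missing node ~ InvalidFlowError
    variant_id = variant_id or node_variants["default_variant_id"]
    override = node_variants["variants"][variant_id]["node"]  # missing variant ~ InvalidFlowError
    for node in dag["nodes"]:
        if node["name"] == node_name and node.get("use_variants"):
            node["inputs"].update(override["inputs"])  # tuned inputs win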
@patch("promptflow._sdk.operations._run_operations.RunOperations.update")
def test_submit(self, mock_update):
# Define input parameters
flow_path = f"{FLOWS_DIR}/web_classification"
client = PFClient()
run_submitter = RunSubmitter(client.runs)
run = Run(
name=str(uuid.uuid4()),
flow=Path(flow_path),
data=f"{DATAS_DIR}/webClassification3.jsonl",
)
# Submit run
run_submitter.submit(run)
# Check if Run.update method was called
mock_update.assert_called_once()
def test_flow_run_with_non_english_inputs(self, pf):
flow_path = f"{FLOWS_DIR}/flow_with_non_english_input"
data = f"{FLOWS_DIR}/flow_with_non_english_input/data.jsonl"
run = pf.run(flow=flow_path, data=data, column_mapping={"text": "${data.text}"})
local_storage = LocalStorageOperations(run=run)
# assert non-English characters in output.jsonl
output_jsonl_path = local_storage._outputs_path
with open(output_jsonl_path, "r", encoding="utf-8") as f:
outputs_text = f.readlines()
assert outputs_text == [
'{"line_number": 0, "output": "Hello 123 日本語"}\n',
'{"line_number": 1, "output": "World 123 日本語"}\n',
]
# assert non-English characters in memory
outputs = local_storage.load_outputs()
assert outputs == {"output": ["Hello 123 日本語", "World 123 日本語"]}
@pytest.mark.usefixtures("enable_logger_propagate")
def test_flow_run_with_unknown_field(self, caplog):
run_yaml = Path(RUNS_DIR) / "sample_bulk_run.yaml"
load_run(source=run_yaml, params_override=[{"unknown_field": "unknown_value"}])
assert "Unknown fields found" in caplog.text
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_utils.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import argparse
import datetime
import importlib
import json
import os
import shutil
import sys
import tempfile
import threading
import time
from pathlib import Path
from unittest.mock import patch
import mock
import pandas as pd
import pytest
from requests import Response
from promptflow._cli._params import AppendToDictAction
from promptflow._cli._utils import (
_build_sorted_column_widths_tuple_list,
_calculate_column_widths,
list_of_dict_to_nested_dict,
)
from promptflow._constants import LAST_CHECK_TIME, PF_VERSION_CHECK
from promptflow._sdk._constants import HOME_PROMPT_FLOW_DIR, PROMPT_FLOW_HOME_DIR_ENV_VAR
from promptflow._sdk._errors import GenerateFlowToolsJsonError
from promptflow._sdk._telemetry.logging_handler import get_scrubbed_cloud_role
from promptflow._sdk._utils import (
_generate_connections_dir,
decrypt_secret_value,
encrypt_secret_value,
generate_flow_tools_json,
override_connection_config_with_environment_variable,
refresh_connections_dir,
resolve_connections_environment_variable_reference,
snake_to_camel,
)
from promptflow._utils.load_data import load_data
from promptflow._utils.retry_utils import http_retry_wrapper, retry
from promptflow._utils.version_hint_utils import check_latest_version
TEST_ROOT = Path(__file__).parent.parent.parent
CONNECTION_ROOT = TEST_ROOT / "test_configs/connections"
@pytest.mark.unittest
class TestUtils:
def test_encrypt_decrypt_value(self):
test_value = "test"
encrypted = encrypt_secret_value(test_value)
assert decrypt_secret_value("mock", encrypted) == test_value
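# The roundtrip above is what any symmetric scheme provides; a sketch using
# Fernet under that assumption (the SDK's real key management differs):
from cryptography.fernet import Fernet

key = Fernet.generate_key()
token = Fernet(key).encrypt(b"test")
assert Fernet(key).decrypt(token) == b"test"  # decrypt recovers the plaintext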
def test_snake_to_camel(self):
assert snake_to_camel("test_snake_case") == "TestSnakeCase"
assert snake_to_camel("TestSnakeCase") == "TestSnakeCase"
def test_sqlite_retry(self, capfd) -> None:
from sqlalchemy.exc import OperationalError
from promptflow._sdk._orm.retry import sqlite_retry
@sqlite_retry
def mock_sqlite_op() -> None:
print("sqlite op...")
raise OperationalError("statement", "params", "orig")
# it will finally raise an OperationalError
with pytest.raises(OperationalError):
mock_sqlite_op()
# assert retry attempts from stdout
out, _ = capfd.readouterr()
assert out.count("sqlite op...") == 3
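# Sketch of the retry-on-OperationalError pattern the decorator implements,
# with the attempt count (3) inferred from the assertion above (hypothetical
# re-implementation for illustration):
import functools
import time
from sqlalchemy.exc import OperationalError

def naive_sqlite_retry(func, tries=3, delay=0.1):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        for attempt in range(tries):
            try:
                return func(*args, **kwargs)
            except OperationalError:
                if attempt == tries - 1:
                    raise  # retries exhausted: surface the original error
                time.sleep(delay)
    return wrapper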
def test_resolve_connections_environment_variable_reference(self):
connections = {
"test_connection": {
"type": "AzureOpenAIConnection",
"value": {
"api_key": "${env:AZURE_OPENAI.API_KEY}",
"api_base": "${env:AZURE_OPENAI_API_BASE}",
},
},
"test_custom_connection": {
"type": "CustomConnection",
"value": {"key": "${env:CUSTOM_KEY}", "key2": "value2"},
},
}
with mock.patch.dict(
os.environ, {"AZURE_OPENAI.API_KEY": "KEY", "AZURE_OPENAI_API_BASE": "BASE", "CUSTOM_KEY": "CUSTOM_VALUE"}
):
resolve_connections_environment_variable_reference(connections)
assert connections["test_connection"]["value"]["api_key"] == "KEY"
assert connections["test_connection"]["value"]["api_base"] == "BASE"
assert connections["test_custom_connection"]["value"]["key"] == "CUSTOM_VALUE"
# test bad cases
connections = {
"test_connection": {
"type": "AzureOpenAIConnection",
"value": {"none_value": None, "integer_value": 1, "float_value": 1.0, "dict_value": {}},
},
}
resolve_connections_environment_variable_reference(connections)
assert connections["test_connection"]["value"] == {
"none_value": None,
"integer_value": 1,
"float_value": 1.0,
"dict_value": {},
}
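# Minimal sketch of the "${env:VAR}" substitution checked above; non-string
# values pass through untouched, matching the bad-case assertions
# (hypothetical re-implementation):
import os
import re

def resolve_env_ref(value):
    if not isinstance(value, str):
        return value  # None, int, float, dict are left as-is
    match = re.fullmatch(r"\$\{env:(.+)\}", value)
    return os.environ.get(match.group(1), value) if match else value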
def test_override_connection_config_with_environment_variable(self):
connections = {
"test_connection": {
"type": "AzureOpenAIConnection",
"value": {
"api_key": "KEY",
"api_base": "https://gpt-test-eus.openai.azure.com/",
},
},
"test_custom_connection": {
"type": "CustomConnection",
"value": {"key": "value1", "key2": "value2"},
},
}
with mock.patch.dict(
os.environ, {"TEST_CONNECTION_API_BASE": "BASE", "TEST_CUSTOM_CONNECTION_KEY": "CUSTOM_VALUE"}
):
override_connection_config_with_environment_variable(connections)
assert connections["test_connection"]["value"]["api_key"] == "KEY"
assert connections["test_connection"]["value"]["api_base"] == "BASE"
assert connections["test_custom_connection"]["value"]["key"] == "CUSTOM_VALUE"
assert connections["test_custom_connection"]["value"]["key2"] == "value2"
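# Sketch of the naming convention exercised above: an environment variable
# named <CONNECTION_NAME>_<CONFIG_KEY>, upper-cased, overrides that config
# value (hypothetical helper):
import os

def override_from_env(connections: dict) -> None:
    for name, conn in connections.items():
        for key in list(conn["value"]):
            env_name = f"{name}_{key}".upper()  # e.g. TEST_CONNECTION_API_BASE
            if env_name in os.environ:
                conn["value"][key] = os.environ[env_name]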
def test_generate_flow_tools_json(self) -> None:
# call twice to ensure system path won't be affected during generation
for _ in range(2):
flow_src_path = "./tests/test_configs/flows/flow_with_sys_inject"
with tempfile.TemporaryDirectory() as temp_dir:
flow_dst_path = os.path.join(temp_dir, "flow_with_sys_inject")
shutil.copytree(flow_src_path, flow_dst_path)
flow_tools_json = generate_flow_tools_json(flow_dst_path, dump=False)
groundtruth = {
"hello.py": {
"type": "python",
"inputs": {
"input1": {
"type": [
"string",
],
},
},
"source": "hello.py",
"function": "my_python_tool",
}
}
assert flow_tools_json["code"] == groundtruth
def test_generate_flow_tools_json_expecting_fail(self) -> None:
flow_path = "./tests/test_configs/flows/flow_with_invalid_import"
with pytest.raises(GenerateFlowToolsJsonError) as e:
generate_flow_tools_json(flow_path, dump=False)
assert "Generate meta failed, detail error(s):" in str(e.value)
# raise_error = False
flow_tools_json = generate_flow_tools_json(flow_path, dump=False, raise_error=False)
assert len(flow_tools_json["code"]) == 0
@pytest.mark.parametrize(
"python_path, env_hash",
[
("D:\\Tools\\Anaconda3\\envs\\pf\\python.exe", ("a9620c3cdb7ccf3ec9f4005e5b19c12d1e1fef80")),
("/Users/fake_user/anaconda3/envs/pf/bin/python3.10", ("e3f33eadd9be376014eb75a688930930ca83c056")),
],
)
def test_generate_connections_dir(self, python_path, env_hash):
expected_result = (HOME_PROMPT_FLOW_DIR / "envs" / env_hash / "connections").resolve()
with patch.object(sys, "executable", python_path):
result = _generate_connections_dir()
assert result == expected_result
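# The env_hash values parametrized above are 40 hex characters, i.e. they look
# like SHA-1 digests of the interpreter path; a sketch of that assumption:
import hashlib

def env_dir_name(python_path: str) -> str:
    return hashlib.sha1(python_path.encode("utf-8")).hexdigest()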
def test_refresh_connections_dir(self):
from promptflow._core.tools_manager import collect_package_tools_and_connections
tools, specs, templates = collect_package_tools_and_connections()
refresh_connections_dir(specs, templates)
conn_dir = _generate_connections_dir()
assert len(os.listdir(conn_dir)) > 0, "No files were generated"
@pytest.mark.parametrize("concurrent_count", [1, 2, 4, 8])
def test_concurrent_execution_of_refresh_connections_dir(self, concurrent_count):
threads = []
# Create and start threads
for _ in range(concurrent_count):
thread = threading.Thread(
target=lambda: refresh_connections_dir(connection_spec_files=[], connection_template_yamls=[])
)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
def test_concurrent_hint_for_update(self):
def mock_check_latest_version():
time.sleep(5)
check_latest_version()
with patch("promptflow._utils.version_hint_utils.datetime") as mock_datetime, patch(
"promptflow._utils.version_hint_utils.check_latest_version", side_effect=mock_check_latest_version
):
from promptflow._sdk._telemetry import monitor_operation
class HintForUpdate:
@monitor_operation(activity_name="pf.flows.test")
def hint_func(self):
return
current_time = datetime.datetime.now()
mock_datetime.datetime.now.return_value = current_time
mock_datetime.datetime.strptime.return_value = current_time - datetime.timedelta(days=8)
mock_datetime.timedelta.return_value = datetime.timedelta(days=7)
HintForUpdate().hint_func()
assert Path(HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK).exists()
with open(HOME_PROMPT_FLOW_DIR / PF_VERSION_CHECK, "r") as f:
cached_versions = json.load(f)
# since mock_check_latest_version runs in a daemon thread, it exits when the main thread completes, so
# LAST_CHECK_TIME won't be updated because of the 5s sleep
assert LAST_CHECK_TIME not in cached_versions or cached_versions[LAST_CHECK_TIME] != str(current_time)
@pytest.mark.parametrize(
"data_path",
[
"./tests/test_configs/datas/load_data_cases/colors.csv",
"./tests/test_configs/datas/load_data_cases/colors.json",
"./tests/test_configs/datas/load_data_cases/colors.jsonl",
"./tests/test_configs/datas/load_data_cases/colors.tsv",
"./tests/test_configs/datas/load_data_cases/colors.parquet",
],
)
def test_load_data(self, data_path: str) -> None:
# for csv and tsv formats, all columns are loaded as strings;
# for the rest, integers load as int and floats as float
is_string = "csv" in data_path or "tsv" in data_path
df = load_data(data_path)
assert len(df) == 3
assert df[0]["name"] == "Red"
assert isinstance(df[0]["id_text"], str)
assert df[0]["id_text"] == "1.0"
if is_string:
assert isinstance(df[0]["id_int"], str)
assert df[0]["id_int"] == "1"
assert isinstance(df[0]["id_float"], str)
assert df[0]["id_float"] == "1.0"
else:
assert isinstance(df[0]["id_int"], int)
assert df[0]["id_int"] == 1
assert isinstance(df[0]["id_float"], float)
assert df[0]["id_float"] == 1.0
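# The csv/tsv dtype behavior asserted above can be reproduced with pandas
# directly, e.g. forcing every column to str on read (illustrative only):
import pandas as pd

df = pd.read_csv("./tests/test_configs/datas/load_data_cases/colors.csv", dtype=str)
records = df.to_dict(orient="records")  # list-of-dicts shape, like load_data's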
@pytest.mark.parametrize(
"data_path",
[
"./tests/test_configs/datas/load_data_cases/10k.jsonl",
"./tests/test_configs/datas/load_data_cases/10k",
],
)
def test_load_10k_data(self, data_path: str) -> None:
df = load_data(data_path)
assert len(df) == 10000
# specify max_rows_count
max_rows_count = 5000
head_rows = load_data(data_path, max_rows_count=max_rows_count)
assert len(head_rows) == max_rows_count
assert head_rows == df[:max_rows_count]
@pytest.mark.parametrize(
"script_name, expected_result",
[
("pfs", "pfs"),
("pfutil.py", "pfutil.py"),
("pf", "pf"),
("pfazure", "pfazure"),
("pf.exe", "pf.exe"),
("pfazure.exe", "pfazure.exe"),
("app.py", "app.py"),
("python -m unittest", "python -m unittest"),
("pytest", "pytest"),
("gunicorn", "gunicorn"),
("ipykernel_launcher.py", "ipykernel_launcher.py"),
("jupyter-notebook", "jupyter-notebook"),
("jupyter-lab", "jupyter-lab"),
("python", "python"),
("Unknown Application", "Unknown Application"),
("unknown_script.py", "***.py"),
("path/to/unknown_script.py", "***.py"),
(r"path\to\unknown_script.py", "***.py"),
('invalid_chars_\\/:*?"<>|', "***"),
],
)
def test_get_scrubbed_cloud_role(self, script_name, expected_result):
with mock.patch("sys.argv", [script_name]):
assert get_scrubbed_cloud_role() == expected_result
def test_configure_pf_home_dir(self, tmpdir) -> None:
from promptflow._sdk import _constants
custom_pf_home_dir_path = Path(tmpdir / ".promptflow").resolve()
assert not custom_pf_home_dir_path.exists()
with patch.dict(os.environ, {PROMPT_FLOW_HOME_DIR_ENV_VAR: custom_pf_home_dir_path.as_posix()}):
importlib.reload(_constants)
assert _constants.HOME_PROMPT_FLOW_DIR.as_posix() == custom_pf_home_dir_path.as_posix()
assert _constants.HOME_PROMPT_FLOW_DIR.is_dir()
importlib.reload(_constants)
def test_configure_pf_home_dir_with_invalid_path(self) -> None:
from promptflow._sdk import _constants
invalid_path = "/invalid:path"
with patch.dict(os.environ, {PROMPT_FLOW_HOME_DIR_ENV_VAR: invalid_path}):
assert os.getenv(PROMPT_FLOW_HOME_DIR_ENV_VAR) == invalid_path
importlib.reload(_constants)
assert _constants.HOME_PROMPT_FLOW_DIR.as_posix() == (Path.home() / ".promptflow").resolve().as_posix()
importlib.reload(_constants)
@pytest.mark.unittest
class TestCLIUtils:
def test_list_of_dict_to_nested_dict(self):
test_list = [{"node1.connection": "a"}, {"node2.deploy_name": "b"}]
result = list_of_dict_to_nested_dict(test_list)
assert result == {"node1": {"connection": "a"}, "node2": {"deploy_name": "b"}}
test_list = [{"node1.connection": "a"}, {"node1.deploy_name": "b"}]
result = list_of_dict_to_nested_dict(test_list)
assert result == {"node1": {"connection": "a", "deploy_name": "b"}}
def test_append_to_dict_action(self):
parser = argparse.ArgumentParser(prog="test_dict_action")
parser.add_argument("--dict", action=AppendToDictAction, nargs="+")
args = ["--dict", "key1=val1", "'key2=val2'", '"key3=val3"', "key4='val4'", "key5=\"val5'"]
args = parser.parse_args(args)
expect_dict = {
"key1": "val1",
"key2": "val2",
"key3": "val3",
"key4": "val4",
"key5": "\"val5'",
}
assert args.dict[0] == expect_dict
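# Sketch of the key=value parsing the action performs, including the quote
# stripping visible in expect_dict: only matched quote pairs are removed
# (hypothetical helper):
def strip_quotes(s: str) -> str:
    if len(s) >= 2 and s[0] == s[-1] and s[0] in ("'", '"'):
        return s[1:-1]
    return s

def parse_kv(token: str):
    token = strip_quotes(token)  # handles "'key2=val2'" and '"key3=val3"'
    key, _, value = token.partition("=")
    return key, strip_quotes(value)  # "key4='val4'" -> val4; "\"val5'" is kept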
def test_build_sorted_column_widths_tuple_list(self) -> None:
columns = ["col1", "col2", "col3"]
values1 = {"col1": 1, "col2": 4, "col3": 3}
values2 = {"col1": 3, "col2": 3, "col3": 1}
margins = {"col1": 1, "col2": 2, "col3": 2}
# sort by (max(values1, values2) + margins)
res = _build_sorted_column_widths_tuple_list(columns, values1, values2, margins)
assert res == [("col2", 6), ("col3", 5), ("col1", 4)]
def test_calculate_column_widths(self) -> None:
data = [
{
"inputs.url": "https://www.youtube.com/watch?v=o5ZQyXaAv1g",
"inputs.answer": "Channel",
"inputs.evidence": "Url",
"outputs.category": "Channel",
"outputs.evidence": "URL",
},
{
"inputs.url": "https://arxiv.org/abs/2307.04767",
"inputs.answer": "Academic",
"inputs.evidence": "Text content",
"outputs.category": "Academic",
"outputs.evidence": "Text content",
},
{
"inputs.url": "https://play.google.com/store/apps/details?id=com.twitter.android",
"inputs.answer": "App",
"inputs.evidence": "Both",
"outputs.category": "App",
"outputs.evidence": "Both",
},
]
df = pd.DataFrame(data)
terminal_width = 120
res = _calculate_column_widths(df, terminal_width)
assert res == [4, 23, 13, 15, 15, 15]
def test_calculate_column_widths_edge_case(self) -> None:
nan = float("nan")
# test case comes from examples/flow/evaluation/eval-qna-non-rag
data = [
{
"inputs.groundtruth": "The Alpine Explorer Tent has the highest rainfly waterproof rating at 3000m",
"inputs.answer": "There are various tents available in the market that offer different levels of waterproofing. However, one tent that is often highly regarded for its waterproofing capabilities is the MSR Hubba Hubba NX tent. It features a durable rainfly and a bathtub-style floor construction, both of which contribute to its excellent water resistance. It is always recommended to read product specifications and customer reviews to ensure you find a tent that meets your specific waterproofing requirements.", # noqa: E501
"inputs.context": "{${data.context}}",
"inputs.question": "Which tent is the most waterproof?",
"inputs.metrics": "gpt_groundedness,f1_score",
"inputs.line_number": 0,
"inputs.ground_truth": "The Alpine Explorer Tent has the highest rainfly waterproof rating at 3000m",
"outputs.line_number": 0,
"outputs.ada_similarity": nan,
"outputs.f1_score": 0.049999999999999996,
"outputs.gpt_coherence": nan,
"outputs.gpt_fluency": nan,
"outputs.gpt_groundedness": 3.0,
"outputs.gpt_relevance": nan,
"outputs.gpt_similarity": nan,
},
{
"inputs.groundtruth": "The Adventure Dining Table has a higher weight capacity than all of the other camping tables mentioned", # noqa: E501
"inputs.answer": "There are various camping tables available that can hold different amounts of weight. Some heavy-duty camping tables can hold up to 300 pounds or more, while others may have lower weight capacities. It's important to check the specifications of each table before purchasing to ensure it can support the weight you require.", # noqa: E501
"inputs.context": "{${data.context}}",
"inputs.question": "Which tent is the most waterproof?",
"inputs.metrics": "gpt_groundedness,f1_score",
"inputs.ground_truth": "The Alpine Explorer Tent has the highest rainfly waterproof rating at 3000m",
"outputs.line_number": 1,
"outputs.ada_similarity": nan,
"outputs.f1_score": 0.0,
"outputs.gpt_coherence": nan,
"outputs.gpt_fluency": nan,
"outputs.gpt_groundedness": 3.0,
"outputs.gpt_relevance": nan,
"outputs.gpt_similarity": nan,
},
]
df = pd.DataFrame(data)
terminal_width = 74 # GitHub Actions scenario
res = _calculate_column_widths(df, terminal_width)
# each column width should be at least 1 to avoid a tabulate error
assert res == [4, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
@pytest.mark.unittest
class TestRetryUtils:
def test_retry(self):
counter = 0
class A:
def mock_f(self):
return 1
class B(A):
@retry(Exception, tries=2, delay=1, backoff=1)
def mock_f(self):
nonlocal counter
counter += 1
raise Exception("mock exception")
with pytest.raises(Exception):
B().mock_f()
assert counter == 2
def test_http_retry(self):
counter = 0
def mock_http_request():
nonlocal counter
counter += 1
resp = Response()
resp.status_code = 429
return resp
http_retry_wrapper(mock_http_request, tries=2, delay=1, backoff=1)()
assert counter == 2
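# Sketch of the status-code driven retry asserted above: a 429 response is
# retried up to `tries` times (assumed behavior, not the real wrapper):
def naive_http_retry(request_func, tries=2, retry_codes=(429,)):
    resp = None
    for _ in range(tries):
        resp = request_func()
        if resp.status_code not in retry_codes:
            break  # non-retriable status: stop early
    return resp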
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_mlflow_dependencies.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
import promptflow
import promptflow._sdk._mlflow as module
@pytest.mark.sdk_test
@pytest.mark.unittest
class TestMLFlowDependencies:
def test_mlflow_dependencies(self):
assert module.DAG_FILE_NAME == "flow.dag.yaml"
assert module.Flow == promptflow._sdk.entities._flow.Flow
assert module.FlowInvoker == promptflow._sdk._serving.flow_invoker.FlowInvoker
assert module.remove_additional_includes is not None
assert module._merge_local_code_and_additional_includes is not None
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_flow_invoker.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from pathlib import Path
import pytest
from promptflow._sdk._serving._errors import UnexpectedConnectionProviderReturn, UnsupportedConnectionProvider
from promptflow._sdk._serving.flow_invoker import FlowInvoker
from promptflow.exceptions import UserErrorException
PROMPTFLOW_ROOT = Path(__file__).parent.parent.parent.parent
FLOWS_DIR = Path(PROMPTFLOW_ROOT / "tests/test_configs/flows")
EXAMPLE_FLOW = FLOWS_DIR / "web_classification"
@pytest.mark.sdk_test
@pytest.mark.unittest
class TestFlowInvoker:
# Note: e2e test of flow invoker has been covered by test_flow_serve.
def test_flow_invoker_unsupported_connection_provider(self):
with pytest.raises(UnsupportedConnectionProvider):
FlowInvoker(flow=EXAMPLE_FLOW, connection_provider=[])
with pytest.raises(UserErrorException):
FlowInvoker(flow=EXAMPLE_FLOW, connection_provider="unsupported")
def test_flow_invoker_custom_connection_provider(self):
# Return is not a list
with pytest.raises(UnexpectedConnectionProviderReturn) as e:
FlowInvoker(flow=EXAMPLE_FLOW, connection_provider=lambda: {})
assert "should return a list of connections" in str(e.value)
# Return is not a connection type
with pytest.raises(UnexpectedConnectionProviderReturn) as e:
FlowInvoker(flow=EXAMPLE_FLOW, connection_provider=lambda: [1, 2])
assert "should be connection type" in str(e.value)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_tool.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import importlib.util
from pathlib import Path
import pytest
TOOL_DIR = Path("./tests/test_configs/tools")
@pytest.mark.unittest
class TestTool:
def get_tool_meta_by_path(self, client, tool_path, module_name):
# Load the module from the file path
spec = importlib.util.spec_from_file_location(module_name, tool_path)
tool_module = importlib.util.module_from_spec(spec)
# Load the module's code
spec.loader.exec_module(tool_module)
# List metadata of tools
tool_meta = client.tools._generate_tool_meta(tool_module)
return tool_meta
def test_python_tool_meta(self, pf):
tool_path = TOOL_DIR / "python_tool.py"
tools_meta, _ = self.get_tool_meta_by_path(pf, tool_path, "python_tool")
# Get python script tool meta
expect_tools_meta = {
"python_tool.my_python_tool": {
"name": "python_tool",
"type": "python",
"inputs": {"input1": {"type": ["string"]}},
"module": "python_tool",
"function": "my_python_tool",
},
"python_tool.my_python_tool_without_name": {
"name": "my_python_tool_without_name",
"type": "python",
"inputs": {"input1": {"type": ["string"]}},
"module": "python_tool",
"function": "my_python_tool_without_name",
},
"python_tool.PythonTool.python_tool": {
"name": "PythonTool.python_tool",
"type": "python",
"inputs": {"connection": {"type": ["AzureOpenAIConnection"]}, "input1": {"type": ["string"]}},
"module": "python_tool",
"class_name": "PythonTool",
"function": "python_tool",
},
}
assert tools_meta == expect_tools_meta
def test_custom_tool_meta(self, pf):
tool_path = TOOL_DIR / "custom_llm_tool.py"
tools_meta, _ = self.get_tool_meta_by_path(pf, tool_path, "custom_llm_tool")
expect_meta = {
"custom_llm_tool.TestCustomLLMTool.tool_func": {
"class_name": "TestCustomLLMTool",
"description": "This is a tool to demonstrate the custom_llm tool type",
"enable_kwargs": True,
"function": "tool_func",
"inputs": {"api": {"type": ["string"]}, "connection": {"type": ["AzureOpenAIConnection"]}},
"module": "custom_llm_tool",
"name": "My Custom LLM Tool",
"type": "custom_llm",
},
"custom_llm_tool.my_tool": {
"description": "This is a tool to demonstrate the custom_llm tool type",
"enable_kwargs": True,
"function": "my_tool",
"inputs": {"connection": {"type": ["CustomConnection"]}},
"module": "custom_llm_tool",
"name": "My Custom LLM Tool",
"type": "custom_llm",
},
}
assert tools_meta == expect_meta
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_local_storage_operations.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pandas as pd
import pytest
from promptflow._sdk._constants import LINE_NUMBER
from promptflow._sdk.operations._local_storage_operations import LocalStorageOperations
@pytest.mark.unittest
class TestLocalStorageOperations:
def test_outputs_padding(self) -> None:
data = [
{LINE_NUMBER: 1, "col": "a"},
{LINE_NUMBER: 2, "col": "b"},
]
df = pd.DataFrame(data)
df_with_padding = LocalStorageOperations._outputs_padding(df, inputs_line_numbers=[0, 1, 2, 3, 4])
df_with_padding.fillna("", inplace=True)
assert len(df_with_padding) == 5
assert df_with_padding.iloc[0].to_dict() == {LINE_NUMBER: 0, "col": ""}
assert df_with_padding.iloc[1].to_dict() == {LINE_NUMBER: 1, "col": "a"}
assert df_with_padding.iloc[2].to_dict() == {LINE_NUMBER: 2, "col": "b"}
assert df_with_padding.iloc[3].to_dict() == {LINE_NUMBER: 3, "col": ""}
assert df_with_padding.iloc[4].to_dict() == {LINE_NUMBER: 4, "col": ""}
# in an evaluation run, inputs may not cover all line numbers
df_with_padding = LocalStorageOperations._outputs_padding(df, inputs_line_numbers=[1, 2, 4])
df_with_padding.fillna("", inplace=True)
assert len(df_with_padding) == 3
assert df_with_padding.iloc[0].to_dict() == {LINE_NUMBER: 1, "col": "a"}
assert df_with_padding.iloc[1].to_dict() == {LINE_NUMBER: 2, "col": "b"}
assert df_with_padding.iloc[2].to_dict() == {LINE_NUMBER: 4, "col": ""}
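# The padding behavior above maps naturally onto a pandas reindex keyed on the
# line-number column; a minimal sketch of that idea (hypothetical helper,
# LINE_NUMBER is the constant imported at the top of this file):
import pandas as pd

def pad_outputs(df: pd.DataFrame, line_numbers: list) -> pd.DataFrame:
    padded = df.set_index(LINE_NUMBER).reindex(line_numbers)
    return padded.reset_index()  # rows absent from df come back as NaN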
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_pf_client.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
from promptflow import PFClient
from promptflow._sdk._utils import ClientUserAgentUtil
@pytest.mark.sdk_test
@pytest.mark.e2etest
class TestPFClient:
def test_pf_client_user_agent(self):
PFClient()
assert "promptflow-sdk" in ClientUserAgentUtil.get_user_agent()
assert "promptflow/" not in ClientUserAgentUtil.get_user_agent()
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_flow_serve.py | from pathlib import Path
import pytest
from sdk_cli_test.conftest import MODEL_ROOT
from promptflow._cli._pf._flow import _resolve_python_flow_additional_includes
@pytest.mark.unittest
def test_flow_serve_resolve_additional_includes():
# Assert flow path not changed if no additional includes
flow_path = (Path(MODEL_ROOT) / "web_classification").resolve().absolute().as_posix()
resolved_flow_path = _resolve_python_flow_additional_includes(flow_path)
assert flow_path == resolved_flow_path
# Assert additional includes are resolved correctly
flow_path = (Path(MODEL_ROOT) / "web_classification_with_additional_include").resolve().absolute().as_posix()
resolved_flow_path = _resolve_python_flow_additional_includes(flow_path)
assert (Path(resolved_flow_path) / "convert_to_dict.py").exists()
assert (Path(resolved_flow_path) / "fetch_text_content_from_url.py").exists()
assert (Path(resolved_flow_path) / "summarize_text_content.jinja2").exists()
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_exceptions.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import pytest
from azure.core.exceptions import HttpResponseError
from promptflow._sdk._orm import RunInfo
from promptflow.exceptions import _ErrorInfo, ErrorCategory, ErrorTarget, UserErrorException
from promptflow.executor import FlowValidator
from promptflow.executor._errors import InvalidNodeReference
FLOWS_DIR = "./tests/test_configs/flows/print_input_flow"
@pytest.mark.unittest
class TestExceptions:
def test_error_category_with_unknown_error(self, pf):
ex = None
try:
pf.run("./exceptions/flows")
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.UNKNOWN
assert error_type == "FileNotFoundError"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == ""
assert (
"module=promptflow._sdk._pf_client, "
'code=raise FileNotFoundError(f"flow path {flow} does not exist"), '
"lineno="
) in error_detail
def test_error_category_with_user_error(self, pf):
ex = None
try:
RunInfo.get("run_name")
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.USER_ERROR
assert error_type == "RunNotFoundError"
assert error_target == ErrorTarget.CONTROL_PLANE_SDK
assert error_message == ""
assert (
"module=promptflow._sdk._orm.run_info, "
'code=raise RunNotFoundError(f"Run name {name!r} cannot be found."), '
"lineno="
) in error_detail
def test_error_category_with_system_error(self):
ex = None
try:
FlowValidator._validate_aggregation_inputs({}, {"input1": "value1"})
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.SYSTEM_ERROR
assert error_type == "InvalidAggregationInput"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == (
"The input for aggregation is incorrect. "
"The value for aggregated reference input '{input_key}' should be a list, "
"but received {value_type}. "
"Please adjust the input value to match the expected format."
)
assert (
"module=promptflow.executor.flow_validator, " "code=raise InvalidAggregationInput(, " "lineno="
) in error_detail
def test_error_category_with_http_error(self, subscription_id, resource_group_name, workspace_name):
try:
raise HttpResponseError(message="HttpResponseError")
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.UNKNOWN
assert error_type == "HttpResponseError"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == ""
assert error_detail == ""
@pytest.mark.parametrize(
"status_code, expected_error_category",
[
(203, ErrorCategory.UNKNOWN),
(304, ErrorCategory.UNKNOWN),
(400, ErrorCategory.UNKNOWN),
(401, ErrorCategory.UNKNOWN),
(429, ErrorCategory.UNKNOWN),
(500, ErrorCategory.UNKNOWN),
],
)
def test_error_category_with_status_code(self, status_code, expected_error_category):
try:
raise Exception()
except Exception as e:
e.status_code = status_code
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == expected_error_category
assert error_type == "Exception"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == ""
assert error_detail == ""
def test_error_category_with_executor_error(self):
try:
msg_format = (
"Invalid node definitions found in the flow graph. Non-aggregation node '{invalid_reference}' "
"cannot be referenced in the activate config of the aggregation node '{node_name}'. Please "
"review and rectify the node reference."
)
raise InvalidNodeReference(message_format=msg_format, invalid_reference=None, node_name="node_name")
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.USER_ERROR
assert error_type == "InvalidNodeReference"
assert error_target == ErrorTarget.EXECUTOR
assert error_message == (
"Invalid node definitions found in the flow graph. Non-aggregation node '{invalid_reference}' "
"cannot be referenced in the activate config of the aggregation node '{node_name}'. Please "
"review and rectify the node reference."
)
assert error_detail == ""
def test_error_category_with_cause_exception1(self):
"""cause exception is PromptflowException and e is PromptflowException, recording e."""
ex = None
try:
try:
FlowValidator._validate_aggregation_inputs({}, {"input1": "value1"})
except Exception as e:
raise UserErrorException("FlowValidator._validate_aggregation_inputs failed") from e
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.USER_ERROR
assert error_type == "InvalidAggregationInput"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == ""
assert error_detail == ""
ex = None
try:
try:
FlowValidator._validate_aggregation_inputs({}, {"input1": "value1"})
except Exception as e:
raise UserErrorException(message=str(e), error=e)
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.USER_ERROR
assert error_type == "InvalidAggregationInput"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == ""
assert error_detail == ""
def test_error_category_with_cause_exception2(self):
"""cause exception is PromptflowException and e is not PromptflowException, recording cause exception."""
ex = None
try:
try:
FlowValidator._validate_aggregation_inputs({}, {"input1": "value1"})
except Exception as e:
raise Exception("FlowValidator._validate_aggregation_inputs failed") from e
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.SYSTEM_ERROR
assert error_type == "InvalidAggregationInput"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == (
"The input for aggregation is incorrect. The value for aggregated reference "
"input '{input_key}' should be a list, but received {value_type}. Please "
"adjust the input value to match the expected format."
)
assert (
"module=promptflow.executor.flow_validator, " "code=raise InvalidAggregationInput(, " "lineno="
) in error_detail
def test_error_category_with_cause_exception3(self, pf):
"""cause exception is not PromptflowException and e is not PromptflowException, recording e exception."""
ex = None
try:
try:
pf.run("./exceptions/flows")
except Exception as e:
raise Exception("pf run failed") from e
except Exception as e:
ex = e
error_category, error_type, error_target, error_message, error_detail = _ErrorInfo.get_error_info(ex)
assert error_category == ErrorCategory.UNKNOWN
assert error_type == "Exception"
assert error_target == ErrorTarget.UNKNOWN
assert error_message == ""
assert error_detail == ""
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_orm.py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import uuid
import pytest
from sqlalchemy import TEXT, Column, create_engine, inspect, text
from sqlalchemy.orm import declarative_base, sessionmaker
from promptflow._sdk._constants import HOME_PROMPT_FLOW_DIR
from promptflow._sdk._orm.session import create_or_update_table, support_transaction
TABLENAME = "orm_entity"
def random_string() -> str:
return str(uuid.uuid4())
def dump(obj, engine) -> None:
session_maker = sessionmaker(bind=engine)
with session_maker() as session:
session.add(obj)
session.commit()
class SchemaV1(declarative_base()):
__tablename__ = TABLENAME
column1 = Column(TEXT, primary_key=True)
column2 = Column(TEXT)
__pf_schema_version__ = "1"
@staticmethod
def generate(engine) -> None:
entity = SchemaV1(column1=random_string(), column2=random_string())
dump(entity, engine)
return
class SchemaV2(declarative_base()):
__tablename__ = TABLENAME
column1 = Column(TEXT, primary_key=True)
column2 = Column(TEXT)
column3 = Column(TEXT)
__pf_schema_version__ = "2"
@staticmethod
def generate(engine) -> None:
entity = SchemaV2(column1=random_string(), column2=random_string(), column3=random_string())
dump(entity, engine)
return
class SchemaV3(declarative_base()):
__tablename__ = TABLENAME
column1 = Column(TEXT, primary_key=True)
column2 = Column(TEXT)
column3 = Column(TEXT)
column4 = Column(TEXT)
__pf_schema_version__ = "3"
@staticmethod
def generate(engine) -> None:
entity = SchemaV3(
column1=random_string(), column2=random_string(), column3=random_string(), column4=random_string()
)
dump(entity, engine)
return
# exactly the same schema as SchemaV3
class SchemaV4(declarative_base()):
__tablename__ = TABLENAME
column1 = Column(TEXT, primary_key=True)
column2 = Column(TEXT)
column3 = Column(TEXT)
column4 = Column(TEXT)
__pf_schema_version__ = "4"
@staticmethod
def generate(engine) -> None:
entity = SchemaV4(
column1=random_string(), column2=random_string(), column3=random_string(), column4=random_string()
)
dump(entity, engine)
return
def mock_use(engine, orm_class, entity_num: int = 1) -> None:
create_or_update_table(engine, orm_class, TABLENAME)
for _ in range(entity_num):
orm_class.generate(engine)
def generate_engine():
db_path = (HOME_PROMPT_FLOW_DIR / ".test" / f"{uuid.uuid4()}.sqlite").resolve()
if not db_path.parent.is_dir():
db_path.parent.mkdir(parents=True, exist_ok=True)
return create_engine(f"sqlite:///{str(db_path)}", future=True)
@pytest.mark.sdk_test
@pytest.mark.unittest
class TestSchemaManagement:
def test_fixed_version(self) -> None:
engine = generate_engine()
mock_use(engine, SchemaV3)
mock_use(engine, SchemaV3, entity_num=2)
mock_use(engine, SchemaV3, entity_num=3)
# 1 table
assert inspect(engine).has_table(TABLENAME)
# 6 rows
entities = [entity for entity in sessionmaker(bind=engine)().query(SchemaV3).all()]
assert len(entities) == 6
def test_version_upgrade(self) -> None:
engine = generate_engine()
mock_use(engine, SchemaV1)
mock_use(engine, SchemaV2)
mock_use(engine, SchemaV3)
# 3 tables: 1 current and 2 legacy
assert inspect(engine).has_table(TABLENAME)
assert inspect(engine).has_table(f"{TABLENAME}_v1")
assert inspect(engine).has_table(f"{TABLENAME}_v2")
# 3 rows in current table
entities = [entity for entity in sessionmaker(bind=engine)().query(SchemaV3).all()]
assert len(entities) == 3
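# Sketch of the upgrade path asserted here, assuming the session helper
# renames the old table to "<name>_v<old_version>", recreates the current
# schema, and copies rows forward (hypothetical pseudologic, not the real
# create_or_update_table):
from sqlalchemy import text as _text

def upgrade_table(connection, name: str, old_version: str) -> None:
    connection.execute(_text(f"ALTER TABLE {name} RENAME TO {name}_v{old_version};"))
    # ...then CREATE TABLE with the new columns and INSERT ... SELECT the old
    # rows, leaving newly added columns NULL.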
def test_version_downgrade(self, capfd) -> None:
engine = generate_engine()
mock_use(engine, SchemaV3)
mock_use(engine, SchemaV2)
mock_use(engine, SchemaV1)
# 1 table
assert inspect(engine).has_table(TABLENAME)
# 3 rows
entities = [entity for entity in sessionmaker(bind=engine)().query(SchemaV1).all()]
assert len(entities) == 3
# with warning message
out, _ = capfd.readouterr()
assert "While we will do our best to ensure compatibility, " in out
def test_version_mixing(self) -> None:
engine = generate_engine()
mock_use(engine, SchemaV2, entity_num=2)
mock_use(engine, SchemaV3, entity_num=3) # 1 upgrade
mock_use(engine, SchemaV2, entity_num=1)
mock_use(engine, SchemaV1, entity_num=4)
mock_use(engine, SchemaV3, entity_num=2)
# 2 tables: 1 current and 1 legacy
assert inspect(engine).has_table(TABLENAME)
assert inspect(engine).has_table(f"{TABLENAME}_v2")
# 12(all) rows in current table
entities = [entity for entity in sessionmaker(bind=engine)().query(SchemaV3).all()]
assert len(entities) == 12
def test_version_across_same_schema_version(self, capfd) -> None:
engine = generate_engine()
# when 3->4, no warning message
mock_use(engine, SchemaV3)
mock_use(engine, SchemaV4)
out, _ = capfd.readouterr()
assert "While we will do our best to ensure compatibility, " not in out
# same schema, no warning message
mock_use(engine, SchemaV4)
out, _ = capfd.readouterr()
assert "While we will do our best to ensure compatibility, " not in out
# when 4->3 (downgrade), a warning message should be printed
mock_use(engine, SchemaV3)
out, _ = capfd.readouterr()
assert "While we will do our best to ensure compatibility, " in out
def test_db_without_schema_info(self) -> None:
engine = generate_engine()
# manually create a table to avoid creation of schema_info table
with engine.begin() as connection:
connection.execute(text(f"CREATE TABLE {TABLENAME} (column1 TEXT PRIMARY KEY);"))
connection.execute(
text(f"INSERT INTO {TABLENAME} (column1) VALUES (:column1);"),
{"column1": random_string()},
)
mock_use(engine, SchemaV3)
# 2 tables: 1 current and 1 legacy with name containing timestamp
assert inspect(engine).has_table(TABLENAME)
# 2 rows in current table
entities = [entity for entity in sessionmaker(bind=engine)().query(SchemaV3).all()]
assert len(entities) == 2
@pytest.mark.sdk_test
@pytest.mark.unittest
class TestTransaction:
def test_commit(self) -> None:
engine = generate_engine()
engine = support_transaction(engine)
tablename = "transaction_test"
sql = f"CREATE TABLE {tablename} (id INTEGER PRIMARY KEY);"
with engine.begin() as connection:
connection.execute(text(sql))
connection.commit()
assert inspect(engine).has_table(tablename)
def test_rollback(self) -> None:
engine = generate_engine()
engine = support_transaction(engine)
tablename = "transaction_test"
sql = f"CREATE TABLE {tablename} (id INTEGER PRIMARY KEY);"
with engine.begin() as connection:
connection.execute(text(sql))
connection.rollback()
assert not inspect(engine).has_table(tablename)
def test_exception_during_transaction(self) -> None:
engine = generate_engine()
engine = support_transaction(engine)
tablename = "transaction_test"
sql = f"CREATE TABLE {tablename} (id INTEGER PRIMARY KEY);"
try:
with engine.begin() as connection:
connection.execute(text(sql))
# raise an exception so that SQLAlchemy rolls the transaction back
raise Exception("test exception")
except Exception:
pass
assert not inspect(engine).has_table(tablename)
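# Conceptually, `engine.begin()` opens a transaction that commits on a clean
# block exit and rolls back on exception; the three tests above pin the commit
# path, an explicit rollback, and the exception path. Bare-SQLAlchemy sketch
# of the happy path:
from sqlalchemy import create_engine, text

demo_engine = create_engine("sqlite://", future=True)
with demo_engine.begin() as conn:  # commits automatically on clean exit
    conn.execute(text("CREATE TABLE demo (id INTEGER PRIMARY KEY);"))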
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test | promptflow_repo/promptflow/src/promptflow/tests/sdk_cli_test/unittests/test_cli_activity_name.py | import pytest
from promptflow._cli._pf.entry import get_parser_args
from promptflow._cli._utils import _get_cli_activity_name
def get_cli_activity_name(cmd):
prog, args = get_parser_args(list(cmd)[1:])
return _get_cli_activity_name(cli=prog, args=args)
@pytest.mark.unittest
class TestCliTimeConsume:
def test_pf_run_create(self, activity_name="pf.run.create") -> None:
assert get_cli_activity_name(
cmd=(
"pf",
"run",
"create",
"--flow",
"print_input_flow",
"--data",
"print_input_flow.jsonl",
)) == activity_name
def test_pf_run_update(self, activity_name="pf.run.update") -> None:
assert get_cli_activity_name(
cmd=(
"pf",
"run",
"update",
"--name",
"test_name",
"--set",
"description=test pf run update"
)) == activity_name
def test_pf_flow_test(self, activity_name="pf.flow.test"):
assert get_cli_activity_name(
cmd=(
"pf",
"flow",
"test",
"--flow",
"print_input_flow",
"--inputs",
"text=https://www.youtube.com/watch?v=o5ZQyXaAv1g",
)) == activity_name
def test_pf_flow_build(self, activity_name="pf.flow.build"):
assert get_cli_activity_name(
cmd=(
"pf",
"flow",
"build",
"--source",
"print_input_flow/flow.dag.yaml",
"--output",
"./",
"--format",
"docker",
)) == activity_name
def test_pf_connection_create(self, activity_name="pf.connection.create"):
assert get_cli_activity_name(
cmd=(
"pf",
"connection",
"create",
"--file",
"azure_openai_connection.yaml",
"--name",
"test_name",
)) == activity_name
def test_pf_connection_list(self, activity_name="pf.connection.list"):
assert get_cli_activity_name(cmd=("pf", "connection", "list")) == activity_name
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/wrong_node_reference/flow.dag.yaml | name: node_wrong_reference
inputs:
text:
type: string
outputs:
result:
type: string
reference: ${second_node}
nodes:
- name: first_node
type: python
source:
type: code
path: test.py
inputs:
text: ${inputs.text}
aggregation: true
- name: second_node
type: python
source:
type: code
path: test.py
inputs:
text: ${third_node}
aggregation: true
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/outputs_with_invalid_flow_inputs_ref/stringify_num.py | from promptflow import tool
@tool
def stringify_num(num: int):
return str(num)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/outputs_with_invalid_flow_inputs_ref/flow.dag.yaml | inputs:
num:
type: int
outputs:
content:
type: string
reference: ${stringify_num.output}
num:
type: int
reference: ${inputs.num11}
nodes:
- name: stringify_num
type: python
source:
type: code
path: stringify_num.py
inputs:
num: ${inputs.num}
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/tool_type_missing/summarize_text_content__variant_1.jinja2 | Please summarize some keywords of this paragraph and have some details of each keywords.
Do not add any information that is not in the text.
Text: {{text}}
Summary:
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/tool_type_missing/flow.dag.yaml | inputs:
text:
type: string
outputs:
output:
type: string
reference: ${summarize_text_content.output}
nodes:
- name: summarize_text_content
source:
type: code
path: summarize_text_content__variant_1.jinja2
inputs:
deployment_name: gpt-35-turbo
suffix: ''
max_tokens: '256'
temperature: '0.2'
top_p: '1.0'
logprobs: ''
echo: 'False'
stop: ''
presence_penalty: '0'
frequency_penalty: '0'
best_of: '1'
logit_bias: ''
text: ${inputs.text}
provider: AzureOpenAI
connection: azure_open_ai_connection
api: completion
module: promptflow.tools.aoai
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/outputs_reference_not_valid/stringify_num.py | from promptflow import tool
@tool
def stringify_num(num: int):
return str(num)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/outputs_reference_not_valid/flow.dag.yaml | inputs:
num:
type: int
outputs:
content:
type: string
reference: ${another_stringify_num.output}
nodes:
- name: stringify_num
type: python
source:
type: code
path: stringify_num.py
inputs:
num: ${inputs.num}
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/nodes_names_duplicated/stringify_num.py | from promptflow import tool
@tool
def stringify_num(num: int):
return str(num)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/nodes_names_duplicated/another_stringify_num.py | from promptflow import tool
@tool
def stringify_num(num: int):
return str(num)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/nodes_names_duplicated/flow.dag.yaml | inputs:
num:
type: int
outputs:
content:
type: string
reference: ${stringify_num.output}
nodes:
- name: stringify_num
type: python
source:
type: code
path: stringify_num.py
inputs:
num: ${inputs.num}
- name: stringify_num
type: python
source:
type: code
path: another_stringify_num.py
inputs:
num: ${inputs.num}
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/wrong_api/summarize_text_content__variant_1.jinja2 | Please summarize some keywords of this paragraph and have some details of each keywords.
Do not add any information that is not in the text.
Text: {{text}}
Summary:
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/wrong_api/flow.dag.yaml | inputs:
text:
type: string
outputs:
output:
type: string
reference: ${summarize_text_content.output}
nodes:
- name: summarize_text_content
type: llm
source:
type: code
path: summarize_text_content__variant_1.jinja2
inputs:
deployment_name: text-davinci-003
suffix: ''
max_tokens: '256'
temperature: '0.2'
top_p: '1.0'
logprobs: ''
echo: 'False'
stop: ''
presence_penalty: '0'
frequency_penalty: '0'
best_of: '1'
logit_bias: ''
text: ${inputs.text}
provider: AzureOpenAI
connection: azure_open_ai_connection
api: completion_1
module: promptflow.tools.aoai
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/aggregation_activate_reference_non_aggregation/flow.dag.yaml | inputs:
groundtruth:
type: string
prediction:
type: string
outputs:
grade:
type: string
reference: ${grade.output}
nodes:
- name: grade
type: python
source:
type: code
path: grade.py
inputs:
groundtruth: ${inputs.groundtruth}
prediction: ${inputs.prediction}
- name: calculate_accuracy
type: python
source:
type: code
path: calculate_accuracy.py
inputs:
grades: ${grade.output}
activate:
when: ${grade.output}
is: 1
aggregation: true
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/node_missing_type_or_source/divide_num.py | from promptflow import tool
@tool
def divide_num(num: int) -> int:
return (int)(num / 2)
| 0 |
promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows | promptflow_repo/promptflow/src/promptflow/tests/test_configs/wrong_flows/node_missing_type_or_source/flow.dag.yaml | inputs:
num:
type: int
outputs:
content:
type: string
reference: ${divide_num.output}
nodes:
- name: divide_num
source:
type: code
path: divide_num.py
inputs:
num: ${inputs.num}
| 0 |