<SYSTEM_TASK:> Updates specified tenant. <END_TASK> <USER_TASK:> Description: def update_tenant( self, tenant, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates specified tenant. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.TenantServiceClient() >>> >>> # TODO: Initialize `tenant`: >>> tenant = {} >>> >>> response = client.update_tenant(tenant) Args: tenant (Union[dict, ~google.cloud.talent_v4beta1.types.Tenant]): Required. The tenant resource to replace the current resource in the system. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Tenant` update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``tenant`` are updated. Otherwise all the fields are updated. A field mask to specify the tenant fields to be updated. Only top level fields of ``Tenant`` are supported. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Tenant` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "update_tenant" not in self._inner_api_calls:
    self._inner_api_calls[
        "update_tenant"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.update_tenant,
        default_retry=self._method_configs["UpdateTenant"].retry,
        default_timeout=self._method_configs["UpdateTenant"].timeout,
        client_info=self._client_info,
    )

request = tenant_service_pb2.UpdateTenantRequest(
    tenant=tenant, update_mask=update_mask
)
return self._inner_api_calls["update_tenant"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
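A hedged usage sketch for the field-mask path, assuming an existing tenant (the resource name and field choice are illustrative, not from the source): passing ``update_mask`` limits the update to the listed fields, while omitting it replaces every field::

    from google.cloud import talent_v4beta1

    client = talent_v4beta1.TenantServiceClient()

    # Illustrative values; any existing tenant resource name works here.
    tenant = {
        "name": "projects/my-project/tenants/my-tenant",
        "external_id": "new-external-id",
    }
    # FieldMask as a dict: only `external_id` is touched; other fields
    # keep their current server-side values.
    update_mask = {"paths": ["external_id"]}

    response = client.update_tenant(tenant, update_mask=update_mask)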
<SYSTEM_TASK:> Apply a list of decorators to a given function. <END_TASK> <USER_TASK:> Description: def _apply_decorators(func, decorators): """Apply a list of decorators to a given function. ``decorators`` may contain items that are ``None`` or ``False`` which will be ignored. """
decorators = filter(_is_not_none_or_false, reversed(decorators))

for decorator in decorators:
    func = decorator(func)

return func
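A minimal sketch (not from the library) showing why the list is reversed: decorators listed first end up outermost, mirroring the top-to-bottom order of stacked ``@decorator`` syntax, and ``None``/``False`` entries are skipped::

    def shout(func):
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs).upper()
        return wrapper

    def exclaim(func):
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs) + "!"
        return wrapper

    def greet():
        return "hello"

    # Equivalent to stacking @shout above @exclaim on greet().
    decorated = _apply_decorators(greet, [shout, exclaim, None, False])
    print(decorated())  # HELLO!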
<SYSTEM_TASK:> Determines how timeout should be applied to a wrapped method. <END_TASK> <USER_TASK:> Description: def _determine_timeout(default_timeout, specified_timeout, retry): """Determines how timeout should be applied to a wrapped method. Args: default_timeout (Optional[Timeout]): The default timeout specified at method creation time. specified_timeout (Optional[Timeout]): The timeout specified at invocation time. If :attr:`DEFAULT`, this will be set to the ``default_timeout``. retry (Optional[Retry]): The retry specified at invocation time. Returns: Optional[Timeout]: The timeout to apply to the method or ``None``. """
if specified_timeout is DEFAULT:
    specified_timeout = default_timeout

if specified_timeout is default_timeout:
    # If timeout is the default and the default timeout is exponential and
    # a non-default retry is specified, make sure the timeout's deadline
    # matches the retry's. This handles the case where the user leaves
    # the timeout default but specifies a lower deadline via the retry.
    if (
        retry
        and retry is not DEFAULT
        and isinstance(default_timeout, timeout.ExponentialTimeout)
    ):
        return default_timeout.with_deadline(retry._deadline)
    else:
        return default_timeout

# If timeout is specified as a number instead of a Timeout instance,
# convert it to a ConstantTimeout.
if isinstance(specified_timeout, (int, float)):
    return timeout.ConstantTimeout(specified_timeout)
else:
    return specified_timeout
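To make the branching concrete, a small illustrative walk-through (``DEFAULT`` and the ``timeout`` module come from ``google.api_core.gapic_v1.method``; the specific deadlines are made up)::

    from google.api_core import timeout as timeout_

    default = timeout_.ExponentialTimeout(deadline=600.0)

    # Caller passed nothing: the default is returned unchanged.
    _determine_timeout(default, DEFAULT, retry=None)  # -> default

    # Caller passed a bare number: it is promoted to a ConstantTimeout.
    _determine_timeout(default, 30, retry=None)  # -> ConstantTimeout(30)

    # Caller kept the default timeout but supplied a retry with a shorter
    # deadline: the exponential timeout's deadline is pulled down to match,
    # e.g. _determine_timeout(default, DEFAULT, retry=retry_with_120s_deadline)
    # -> default.with_deadline(120.0)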
<SYSTEM_TASK:> Wrap an RPC method with common behavior. <END_TASK> <USER_TASK:> Description: def wrap_method( func, default_retry=None, default_timeout=None, client_info=client_info.DEFAULT_CLIENT_INFO, ): """Wrap an RPC method with common behavior. This applies common error wrapping, retry, and timeout behavior to a function. The wrapped function will take optional ``retry`` and ``timeout`` arguments. For example:: import google.api_core.gapic_v1.method from google.api_core import retry from google.api_core import timeout # The original RPC method. def get_topic(name, timeout=None): request = publisher_v2.GetTopicRequest(name=name) return publisher_stub.GetTopic(request, timeout=timeout) default_retry = retry.Retry(deadline=60) default_timeout = timeout.Timeout(deadline=60) wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method( get_topic, default_retry) # Execute get_topic with default retry and timeout: response = wrapped_get_topic() # Execute get_topic without doing any retrying but with the default # timeout: response = wrapped_get_topic(retry=None) # Execute get_topic but only retry on 5xx errors: my_retry = retry.Retry(retry.if_exception_type( exceptions.InternalServerError)) response = wrapped_get_topic(retry=my_retry) The way this works is by late-wrapping the given function with the retry and timeout decorators. Essentially, when ``wrapped_get_topic()`` is called: * ``get_topic()`` is first wrapped with the ``timeout`` into ``get_topic_with_timeout``. * ``get_topic_with_timeout`` is wrapped with the ``retry`` into ``get_topic_with_timeout_and_retry()``. * The final ``get_topic_with_timeout_and_retry`` is called passing through the ``args`` and ``kwargs``. The callstack is therefore:: method.__call__() -> Retry.__call__() -> Timeout.__call__() -> wrap_errors() -> get_topic() Note that if ``timeout`` or ``retry`` is ``None``, then they are not applied to the function. For example, ``wrapped_get_topic(timeout=None, retry=None)`` is more or less equivalent to just calling ``get_topic`` but with error re-mapping. Args: func (Callable[Any]): The function to wrap. It should accept an optional ``timeout`` argument. If ``metadata`` is not ``None``, it should accept a ``metadata`` argument. default_retry (Optional[google.api_core.Retry]): The default retry strategy. If ``None``, the method will not retry by default. default_timeout (Optional[google.api_core.Timeout]): The default timeout strategy. Can also be specified as an int or float. If ``None``, the method will not have timeout specified by default. client_info (Optional[google.api_core.gapic_v1.client_info.ClientInfo]): Client information used to create a user-agent string that's passed as gRPC metadata to the method. If unspecified, then a sane default will be used. If ``None``, then no user agent metadata will be provided to the RPC method. Returns: Callable: A new callable that takes optional ``retry`` and ``timeout`` arguments and applies the common error mapping, retry, timeout, and metadata behavior to the low-level RPC method. """
func = grpc_helpers.wrap_errors(func)

if client_info is not None:
    user_agent_metadata = [client_info.to_grpc_metadata()]
else:
    user_agent_metadata = None

return general_helpers.wraps(func)(
    _GapicCallable(
        func, default_retry, default_timeout, metadata=user_agent_metadata
    )
)
<SYSTEM_TASK:> Pre-flight ``Bucket`` name validation. <END_TASK> <USER_TASK:> Description: def _validate_name(name): """Pre-flight ``Bucket`` name validation. :type name: str or :data:`NoneType` :param name: Proposed bucket name. :rtype: str or :data:`NoneType` :returns: ``name`` if valid. """
if name is None:
    return

# The first and last characters must be alphanumeric.
if not all([name[0].isalnum(), name[-1].isalnum()]):
    raise ValueError("Bucket names must start and end with a number or letter.")
return name
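A quick sketch of the three behaviors (the names are illustrative)::

    assert _validate_name("my-bucket") == "my-bucket"
    assert _validate_name(None) is None  # defers validation to the server

    try:
        _validate_name("-bucket")  # leading character is not alphanumeric
    except ValueError as exc:
        print(exc)  # Bucket names must start and end with a number or letter.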
<SYSTEM_TASK:> Read blocks from a buffer and update a hash with them. <END_TASK> <USER_TASK:> Description: def _write_buffer_to_hash(buffer_object, hash_obj, digest_block_size=8192): """Read blocks from a buffer and update a hash with them. :type buffer_object: bytes buffer :param buffer_object: Buffer containing bytes used to update a hash object. :type hash_obj: object that implements update :param hash_obj: A hash object (MD5 or CRC32-C). :type digest_block_size: int :param digest_block_size: The block size to write to the hash. Defaults to 8192. """
block = buffer_object.read(digest_block_size)

while len(block) > 0:
    hash_obj.update(block)
    # Update the block for the next iteration.
    block = buffer_object.read(digest_block_size)
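A minimal, self-contained usage sketch with only the standard library (the payload is arbitrary test data chosen to span multiple blocks)::

    import hashlib
    import io

    payload = b"x" * 20000  # spans three 8192-byte blocks
    buffer_object = io.BytesIO(payload)

    md5 = hashlib.md5()
    _write_buffer_to_hash(buffer_object, md5)

    assert md5.hexdigest() == hashlib.md5(payload).hexdigest()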
<SYSTEM_TASK:> Reload properties from Cloud Storage. <END_TASK> <USER_TASK:> Description: def reload(self, client=None): """Reload properties from Cloud Storage. If :attr:`user_project` is set, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current object. """
client = self._require_client(client)
query_params = self._query_params
# Pass only '?projection=noAcl' here because 'acl' and related
# are handled via custom endpoints.
query_params["projection"] = "noAcl"
api_response = client._connection.api_request(
    method="GET",
    path=self.path,
    query_params=query_params,
    headers=self._encryption_headers(),
    _target_object=self,
)
self._set_properties(api_response)
<SYSTEM_TASK:> Update field of this object's properties. <END_TASK> <USER_TASK:> Description: def _patch_property(self, name, value): """Update field of this object's properties. This method will only update the field provided and will not touch the other fields. It **will not** reload the properties from the server. The behavior is local only and syncing occurs via :meth:`patch`. :type name: str :param name: The field name to update. :type value: object :param value: The value being updated. """
self._changes.add(name)
self._properties[name] = value
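An illustrative sketch of the local-only semantics; the ``blob`` object and the ``contentType`` key are assumptions for the example, not taken from the source::

    # Nothing is sent to the server here; the change is staged locally.
    blob._patch_property("contentType", "application/json")

    assert "contentType" in blob._changes  # marked for the next patch()
    assert blob._properties["contentType"] == "application/json"
    # blob.patch()  # would sync only the staged fields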
<SYSTEM_TASK:> Sends all properties in a PUT request. <END_TASK> <USER_TASK:> Description: def update(self, client=None): """Sends all properties in a PUT request. Updates the ``_properties`` with the response from the backend. If :attr:`user_project` is set, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current object. """
client = self._require_client(client)
query_params = self._query_params
query_params["projection"] = "full"
api_response = client._connection.api_request(
    method="PUT",
    path=self.path,
    data=self._properties,
    query_params=query_params,
    _target_object=self,
)
self._set_properties(api_response)
<SYSTEM_TASK:> Map the callback requests to the appropriate gRPC requests. <END_TASK> <USER_TASK:> Description: def dispatch_callback(self, items): """Map the callback requests to the appropriate gRPC requests. Batches the given items by type and invokes the matching handler for each batch. Args: items (Sequence[Any]): Items to dispatch. Each item must be an instance of one of the expected request types: ``AckRequest``, ``DropRequest``, ``LeaseRequest``, ``ModAckRequest`` or ``NackRequest``. """
if not self._manager.is_active:
    return

batched_commands = collections.defaultdict(list)

for item in items:
    batched_commands[item.__class__].append(item)

_LOGGER.debug("Handling %d batched requests", len(items))

if batched_commands[requests.LeaseRequest]:
    self.lease(batched_commands.pop(requests.LeaseRequest))
if batched_commands[requests.ModAckRequest]:
    self.modify_ack_deadline(batched_commands.pop(requests.ModAckRequest))
# Note: Drop and ack *must* be after lease. It's possible to get both
# the lease and the ack/drop request in the same batch.
if batched_commands[requests.AckRequest]:
    self.ack(batched_commands.pop(requests.AckRequest))
if batched_commands[requests.NackRequest]:
    self.nack(batched_commands.pop(requests.NackRequest))
if batched_commands[requests.DropRequest]:
    self.drop(batched_commands.pop(requests.DropRequest))
<SYSTEM_TASK:> Acknowledge the given messages. <END_TASK> <USER_TASK:> Description: def ack(self, items): """Acknowledge the given messages. Args: items(Sequence[AckRequest]): The items to acknowledge. """
# If we got timing information, add it to the histogram.
for item in items:
    time_to_ack = item.time_to_ack
    if time_to_ack is not None:
        self._manager.ack_histogram.add(time_to_ack)

ack_ids = [item.ack_id for item in items]
request = types.StreamingPullRequest(ack_ids=ack_ids)
self._manager.send(request)

# Remove the message from lease management.
self.drop(items)
<SYSTEM_TASK:> Remove the given messages from lease management. <END_TASK> <USER_TASK:> Description: def drop(self, items): """Remove the given messages from lease management. Args: items(Sequence[DropRequest]): The items to drop. """
self._manager.leaser.remove(items)
self._manager.maybe_resume_consumer()
<SYSTEM_TASK:> Add the given messages to lease management. <END_TASK> <USER_TASK:> Description: def lease(self, items): """Add the given messages to lease management. Args: items(Sequence[LeaseRequest]): The items to lease. """
self._manager.leaser.add(items)
self._manager.maybe_pause_consumer()
<SYSTEM_TASK:> Modify the ack deadline for the given messages. <END_TASK> <USER_TASK:> Description: def modify_ack_deadline(self, items): """Modify the ack deadline for the given messages. Args: items(Sequence[ModAckRequest]): The items to modify. """
ack_ids = [item.ack_id for item in items]
seconds = [item.seconds for item in items]

request = types.StreamingPullRequest(
    modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=seconds
)
self._manager.send(request)
<SYSTEM_TASK:> Explicitly deny receipt of messages. <END_TASK> <USER_TASK:> Description: def nack(self, items): """Explicitly deny receipt of messages. Args: items(Sequence[NackRequest]): The items to deny. """
self.modify_ack_deadline(
    [requests.ModAckRequest(ack_id=item.ack_id, seconds=0) for item in items]
)
self.drop([requests.DropRequest(*item) for item in items])
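Design note, inferred from the body above: a nack is not a dedicated RPC here but a composition of the two primitives. Setting the ack deadline to ``0`` seconds tells the service to redeliver the message immediately, and the follow-up drop releases the client-side lease so local flow control frees a slot.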
<SYSTEM_TASK:> Submits a job to a cluster. <END_TASK> <USER_TASK:> Description: def submit_job( self, project_id, region, job, request_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Submits a job to a cluster. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.JobControllerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `region`: >>> region = '' >>> >>> # TODO: Initialize `job`: >>> job = {} >>> >>> response = client.submit_job(project_id, region, job) Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. region (str): Required. The Cloud Dataproc region in which to handle the request. job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The job resource. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.Job` request_id (str): Optional. A unique id used to identify the request. If the server receives two ``SubmitJobRequest`` requests with the same id, then the second request will be ignored and the first ``Job`` created and stored in the backend is returned. It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.Job` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "submit_job" not in self._inner_api_calls:
    self._inner_api_calls[
        "submit_job"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.submit_job,
        default_retry=self._method_configs["SubmitJob"].retry,
        default_timeout=self._method_configs["SubmitJob"].timeout,
        client_info=self._client_info,
    )

request = jobs_pb2.SubmitJobRequest(
    project_id=project_id, region=region, job=job, request_id=request_id
)
return self._inner_api_calls["submit_job"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Updates a job in a project. <END_TASK> <USER_TASK:> Description: def update_job( self, project_id, region, job_id, job, update_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates a job in a project. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.JobControllerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `region`: >>> region = '' >>> >>> # TODO: Initialize `job_id`: >>> job_id = '' >>> >>> # TODO: Initialize `job`: >>> job = {} >>> >>> # TODO: Initialize `update_mask`: >>> update_mask = {} >>> >>> response = client.update_job(project_id, region, job_id, job, update_mask) Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. region (str): Required. The Cloud Dataproc region in which to handle the request. job_id (str): Required. The job ID. job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The changes to the job. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.Job` update_mask (Union[dict, ~google.cloud.dataproc_v1beta2.types.FieldMask]): Required. Specifies the path, relative to Job, of the field to update. For example, to update the labels of a Job the update\_mask parameter would be specified as labels, and the ``PATCH`` request body would specify the new value. Note: Currently, labels is the only field that can be updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.Job` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "update_job" not in self._inner_api_calls:
    self._inner_api_calls[
        "update_job"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.update_job,
        default_retry=self._method_configs["UpdateJob"].retry,
        default_timeout=self._method_configs["UpdateJob"].timeout,
        client_info=self._client_info,
    )

request = jobs_pb2.UpdateJobRequest(
    project_id=project_id,
    region=region,
    job_id=job_id,
    job=job,
    update_mask=update_mask,
)
return self._inner_api_calls["update_job"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
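A hedged usage sketch following the docstring's note that ``labels`` is currently the only mutable field (the project, region, and job identifiers are illustrative)::

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.JobControllerClient()

    job = {"labels": {"team": "data-eng"}}  # the new label values
    update_mask = {"paths": ["labels"]}     # PATCH only the labels field

    response = client.update_job(
        "my-project", "global", "my-job-id", job, update_mask
    )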
<SYSTEM_TASK:> Return a fully-qualified instance string. <END_TASK> <USER_TASK:> Description: def instance_path(cls, project, instance): """Return a fully-qualified instance string."""
return google.api_core.path_template.expand(
    "projects/{project}/instances/{instance}",
    project=project,
    instance=instance,
)
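These path helpers are thin classmethod wrappers over ``google.api_core.path_template.expand``; a short illustrative call (the identifiers are made up)::

    path = BigtableInstanceAdminClient.instance_path("my-project", "my-instance")
    # -> 'projects/my-project/instances/my-instance'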
<SYSTEM_TASK:> Return a fully-qualified cluster string. <END_TASK> <USER_TASK:> Description: def cluster_path(cls, project, instance, cluster): """Return a fully-qualified cluster string."""
return google.api_core.path_template.expand(
    "projects/{project}/instances/{instance}/clusters/{cluster}",
    project=project,
    instance=instance,
    cluster=cluster,
)
<SYSTEM_TASK:> Lists information about instances in a project. <END_TASK> <USER_TASK:> Description: def list_instances( self, parent, page_token=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists information about instances in a project. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableInstanceAdminClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> response = client.list_instances(parent) Args: parent (str): The unique name of the project for which a list of instances is requested. Values are of the form ``projects/<project>``. page_token (str): DEPRECATED: This field is unused and ignored. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types.ListInstancesResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "list_instances" not in self._inner_api_calls:
    self._inner_api_calls[
        "list_instances"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.list_instances,
        default_retry=self._method_configs["ListInstances"].retry,
        default_timeout=self._method_configs["ListInstances"].timeout,
        client_info=self._client_info,
    )

request = bigtable_instance_admin_pb2.ListInstancesRequest(
    parent=parent, page_token=page_token
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("parent", parent)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

return self._inner_api_calls["list_instances"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Partially updates an instance within a project. <END_TASK> <USER_TASK:> Description: def partial_update_instance( self, instance, update_mask, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Partially updates an instance within a project. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableInstanceAdminClient() >>> >>> # TODO: Initialize `instance`: >>> instance = {} >>> >>> # TODO: Initialize `update_mask`: >>> update_mask = {} >>> >>> response = client.partial_update_instance(instance, update_mask) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: instance (Union[dict, ~google.cloud.bigtable_admin_v2.types.Instance]): The Instance which will (partially) replace the current value. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Instance` update_mask (Union[dict, ~google.cloud.bigtable_admin_v2.types.FieldMask]): The subset of Instance fields which should be replaced. Must be explicitly set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "partial_update_instance" not in self._inner_api_calls:
    self._inner_api_calls[
        "partial_update_instance"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.partial_update_instance,
        default_retry=self._method_configs["PartialUpdateInstance"].retry,
        default_timeout=self._method_configs["PartialUpdateInstance"].timeout,
        client_info=self._client_info,
    )

request = bigtable_instance_admin_pb2.PartialUpdateInstanceRequest(
    instance=instance, update_mask=update_mask
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("instance.name", instance.name)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["partial_update_instance"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    instance_pb2.Instance,
    metadata_type=bigtable_instance_admin_pb2.UpdateInstanceMetadata,
)
<SYSTEM_TASK:> Creates a cluster within an instance. <END_TASK> <USER_TASK:> Description: def create_cluster( self, parent, cluster_id, cluster, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a cluster within an instance. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableInstanceAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> # TODO: Initialize `cluster`: >>> cluster = {} >>> >>> response = client.create_cluster(parent, cluster_id, cluster) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): The unique name of the instance in which to create the new cluster. Values are of the form ``projects/<project>/instances/<instance>``. cluster_id (str): The ID to be used when referring to the new cluster within its instance, e.g., just ``mycluster`` rather than ``projects/myproject/instances/myinstance/clusters/mycluster``. cluster (Union[dict, ~google.cloud.bigtable_admin_v2.types.Cluster]): The cluster to be created. Fields marked ``OutputOnly`` must be left blank. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Cluster` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "create_cluster" not in self._inner_api_calls:
    self._inner_api_calls[
        "create_cluster"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.create_cluster,
        default_retry=self._method_configs["CreateCluster"].retry,
        default_timeout=self._method_configs["CreateCluster"].timeout,
        client_info=self._client_info,
    )

request = bigtable_instance_admin_pb2.CreateClusterRequest(
    parent=parent, cluster_id=cluster_id, cluster=cluster
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("parent", parent)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["create_cluster"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    instance_pb2.Cluster,
    metadata_type=bigtable_instance_admin_pb2.CreateClusterMetadata,
)
<SYSTEM_TASK:> Updates a cluster within an instance. <END_TASK> <USER_TASK:> Description: def update_cluster( self, name, serve_nodes, location=None, state=None, default_storage_type=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates a cluster within an instance. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableInstanceAdminClient() >>> >>> name = client.cluster_path('[PROJECT]', '[INSTANCE]', '[CLUSTER]') >>> >>> # TODO: Initialize `serve_nodes`: >>> serve_nodes = 0 >>> >>> response = client.update_cluster(name, serve_nodes) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): (``OutputOnly``) The unique name of the cluster. Values are of the form ``projects/<project>/instances/<instance>/clusters/[a-z][-a-z0-9]*``. serve_nodes (int): The number of nodes allocated to this cluster. More nodes enable higher throughput and more consistent performance. location (str): (``CreationOnly``) The location where this cluster's nodes and storage reside. For best performance, clients should be located as close as possible to this cluster. Currently only zones are supported, so values should be of the form ``projects/<project>/locations/<zone>``. state (~google.cloud.bigtable_admin_v2.types.State): (``OutputOnly``) The current state of the cluster. default_storage_type (~google.cloud.bigtable_admin_v2.types.StorageType): (``CreationOnly``) The type of storage used by this cluster to serve its parent instance's tables, unless explicitly overridden. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "update_cluster" not in self._inner_api_calls:
    self._inner_api_calls[
        "update_cluster"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.update_cluster,
        default_retry=self._method_configs["UpdateCluster"].retry,
        default_timeout=self._method_configs["UpdateCluster"].timeout,
        client_info=self._client_info,
    )

request = instance_pb2.Cluster(
    name=name,
    serve_nodes=serve_nodes,
    location=location,
    state=state,
    default_storage_type=default_storage_type,
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("name", name)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["update_cluster"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    instance_pb2.Cluster,
    metadata_type=bigtable_instance_admin_pb2.UpdateClusterMetadata,
)
<SYSTEM_TASK:> Creates an app profile within an instance. <END_TASK> <USER_TASK:> Description: def create_app_profile( self, parent, app_profile_id, app_profile, ignore_warnings=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates an app profile within an instance. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableInstanceAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `app_profile_id`: >>> app_profile_id = '' >>> >>> # TODO: Initialize `app_profile`: >>> app_profile = {} >>> >>> response = client.create_app_profile(parent, app_profile_id, app_profile) Args: parent (str): The unique name of the instance in which to create the new app profile. Values are of the form ``projects/<project>/instances/<instance>``. app_profile_id (str): The ID to be used when referring to the new app profile within its instance, e.g., just ``myprofile`` rather than ``projects/myproject/instances/myinstance/appProfiles/myprofile``. app_profile (Union[dict, ~google.cloud.bigtable_admin_v2.types.AppProfile]): The app profile to be created. Fields marked ``OutputOnly`` will be ignored. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` ignore_warnings (bool): If true, ignore safety checks when creating the app profile. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "create_app_profile" not in self._inner_api_calls:
    self._inner_api_calls[
        "create_app_profile"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.create_app_profile,
        default_retry=self._method_configs["CreateAppProfile"].retry,
        default_timeout=self._method_configs["CreateAppProfile"].timeout,
        client_info=self._client_info,
    )

request = bigtable_instance_admin_pb2.CreateAppProfileRequest(
    parent=parent,
    app_profile_id=app_profile_id,
    app_profile=app_profile,
    ignore_warnings=ignore_warnings,
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("parent", parent)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

return self._inner_api_calls["create_app_profile"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Updates an app profile within an instance. <END_TASK> <USER_TASK:> Description: def update_app_profile( self, app_profile, update_mask, ignore_warnings=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates an app profile within an instance. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableInstanceAdminClient() >>> >>> # TODO: Initialize `app_profile`: >>> app_profile = {} >>> >>> # TODO: Initialize `update_mask`: >>> update_mask = {} >>> >>> response = client.update_app_profile(app_profile, update_mask) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: app_profile (Union[dict, ~google.cloud.bigtable_admin_v2.types.AppProfile]): The app profile which will (partially) replace the current value. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` update_mask (Union[dict, ~google.cloud.bigtable_admin_v2.types.FieldMask]): The subset of app profile fields which should be replaced. If unset, all fields will be replaced. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.FieldMask` ignore_warnings (bool): If true, ignore safety checks when updating the app profile. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "update_app_profile" not in self._inner_api_calls:
    self._inner_api_calls[
        "update_app_profile"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.update_app_profile,
        default_retry=self._method_configs["UpdateAppProfile"].retry,
        default_timeout=self._method_configs["UpdateAppProfile"].timeout,
        client_info=self._client_info,
    )

request = bigtable_instance_admin_pb2.UpdateAppProfileRequest(
    app_profile=app_profile,
    update_mask=update_mask,
    ignore_warnings=ignore_warnings,
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("app_profile.name", app_profile.name)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["update_app_profile"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    instance_pb2.AppProfile,
    metadata_type=bigtable_instance_admin_pb2.UpdateAppProfileMetadata,
)
<SYSTEM_TASK:> Return a fully-qualified annotation_spec_set string. <END_TASK> <USER_TASK:> Description: def annotation_spec_set_path(cls, project, annotation_spec_set): """Return a fully-qualified annotation_spec_set string."""
return google.api_core.path_template.expand(
    "projects/{project}/annotationSpecSets/{annotation_spec_set}",
    project=project,
    annotation_spec_set=annotation_spec_set,
)
<SYSTEM_TASK:> Return a fully-qualified example string. <END_TASK> <USER_TASK:> Description: def example_path(cls, project, dataset, annotated_dataset, example): """Return a fully-qualified example string."""
return google.api_core.path_template.expand(
    "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}/examples/{example}",
    project=project,
    dataset=dataset,
    annotated_dataset=annotated_dataset,
    example=example,
)
<SYSTEM_TASK:> Return a fully-qualified data_item string. <END_TASK> <USER_TASK:> Description: def data_item_path(cls, project, dataset, data_item): """Return a fully-qualified data_item string."""
return google.api_core.path_template.expand(
    "projects/{project}/datasets/{dataset}/dataItems/{data_item}",
    project=project,
    dataset=dataset,
    data_item=data_item,
)
<SYSTEM_TASK:> Return a fully-qualified instruction string. <END_TASK> <USER_TASK:> Description: def instruction_path(cls, project, instruction): """Return a fully-qualified instruction string."""
return google.api_core.path_template.expand(
    "projects/{project}/instructions/{instruction}",
    project=project,
    instruction=instruction,
)
<SYSTEM_TASK:> Exports data and annotations from dataset. <END_TASK> <USER_TASK:> Description: def export_data( self, name, annotated_dataset, output_config, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Exports data and annotations from dataset. Example: >>> from google.cloud import datalabeling_v1beta1 >>> >>> client = datalabeling_v1beta1.DataLabelingServiceClient() >>> >>> name = client.dataset_path('[PROJECT]', '[DATASET]') >>> >>> # TODO: Initialize `annotated_dataset`: >>> annotated_dataset = '' >>> >>> # TODO: Initialize `output_config`: >>> output_config = {} >>> >>> response = client.export_data(name, annotated_dataset, output_config) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Dataset resource name, format: projects/{project\_id}/datasets/{dataset\_id} annotated_dataset (str): Required. Annotated dataset resource name. DataItem in Dataset and their annotations in specified annotated dataset will be exported. It's in format of projects/{project\_id}/datasets/{dataset\_id}/annotatedDatasets/ {annotated\_dataset\_id} output_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.OutputConfig]): Required. Specify the output destination. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.OutputConfig` filter_ (str): Optional. Filter is not supported at this moment. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "export_data" not in self._inner_api_calls:
    self._inner_api_calls[
        "export_data"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.export_data,
        default_retry=self._method_configs["ExportData"].retry,
        default_timeout=self._method_configs["ExportData"].timeout,
        client_info=self._client_info,
    )

request = data_labeling_service_pb2.ExportDataRequest(
    name=name,
    annotated_dataset=annotated_dataset,
    output_config=output_config,
    filter=filter_,
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("name", name)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["export_data"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    proto_operations_pb2.ExportDataOperationResponse,
    metadata_type=proto_operations_pb2.ExportDataOperationMetadata,
)
<SYSTEM_TASK:> Starts a labeling task for image. The type of image labeling task is <END_TASK> <USER_TASK:> Description: def label_image( self, parent, basic_config, feature, image_classification_config=None, bounding_poly_config=None, polyline_config=None, segmentation_config=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Starts a labeling task for image. The type of image labeling task is configured by feature in the request. Example: >>> from google.cloud import datalabeling_v1beta1 >>> from google.cloud.datalabeling_v1beta1 import enums >>> >>> client = datalabeling_v1beta1.DataLabelingServiceClient() >>> >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') >>> >>> # TODO: Initialize `basic_config`: >>> basic_config = {} >>> >>> # TODO: Initialize `feature`: >>> feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED >>> >>> response = client.label_image(parent, basic_config, feature) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. Name of the dataset to request labeling task, format: projects/{project\_id}/datasets/{dataset\_id} basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig` feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of image labeling task. image_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ImageClassificationConfig]): Configuration for image classification task. One of image\_classification\_config, bounding\_poly\_config, polyline\_config and segmentation\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.ImageClassificationConfig` bounding_poly_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.BoundingPolyConfig]): Configuration for bounding box and bounding poly task. One of image\_classification\_config, bounding\_poly\_config, polyline\_config and segmentation\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.BoundingPolyConfig` polyline_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.PolylineConfig]): Configuration for polyline task. One of image\_classification\_config, bounding\_poly\_config, polyline\_config and segmentation\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.PolylineConfig` segmentation_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.SegmentationConfig]): Configuration for segmentation task. One of image\_classification\_config, bounding\_poly\_config, polyline\_config and segmentation\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.SegmentationConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "label_image" not in self._inner_api_calls:
    self._inner_api_calls[
        "label_image"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.label_image,
        default_retry=self._method_configs["LabelImage"].retry,
        default_timeout=self._method_configs["LabelImage"].timeout,
        client_info=self._client_info,
    )

# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(
    image_classification_config=image_classification_config,
    bounding_poly_config=bounding_poly_config,
    polyline_config=polyline_config,
    segmentation_config=segmentation_config,
)

request = data_labeling_service_pb2.LabelImageRequest(
    parent=parent,
    basic_config=basic_config,
    feature=feature,
    image_classification_config=image_classification_config,
    bounding_poly_config=bounding_poly_config,
    polyline_config=polyline_config,
    segmentation_config=segmentation_config,
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("parent", parent)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["label_image"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    dataset_pb2.AnnotatedDataset,
    metadata_type=proto_operations_pb2.LabelOperationMetadata,
)
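For reference, ``check_oneof`` raises ``ValueError`` when more than one of its keyword arguments is set; a small self-contained sketch (the argument names are placeholders)::

    from google.api_core import protobuf_helpers

    protobuf_helpers.check_oneof(a=None, b={"x": 1})  # OK: only one is set

    try:
        protobuf_helpers.check_oneof(a={"x": 1}, b={"y": 2})
    except ValueError as exc:
        print(exc)  # reports that only one of the oneof fields may be set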
<SYSTEM_TASK:> Starts a labeling task for video. The type of video labeling task is <END_TASK> <USER_TASK:> Description: def label_video( self, parent, basic_config, feature, video_classification_config=None, object_detection_config=None, object_tracking_config=None, event_config=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Starts a labeling task for video. The type of video labeling task is configured by feature in the request. Example: >>> from google.cloud import datalabeling_v1beta1 >>> from google.cloud.datalabeling_v1beta1 import enums >>> >>> client = datalabeling_v1beta1.DataLabelingServiceClient() >>> >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') >>> >>> # TODO: Initialize `basic_config`: >>> basic_config = {} >>> >>> # TODO: Initialize `feature`: >>> feature = enums.LabelVideoRequest.Feature.FEATURE_UNSPECIFIED >>> >>> response = client.label_video(parent, basic_config, feature) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. Name of the dataset to request labeling task, format: projects/{project\_id}/datasets/{dataset\_id} basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig` feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of video labeling task. video_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.VideoClassificationConfig]): Configuration for video classification task. One of video\_classification\_config, object\_detection\_config, object\_tracking\_config and event\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.VideoClassificationConfig` object_detection_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ObjectDetectionConfig]): Configuration for video object detection task. One of video\_classification\_config, object\_detection\_config, object\_tracking\_config and event\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.ObjectDetectionConfig` object_tracking_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ObjectTrackingConfig]): Configuration for video object tracking task. One of video\_classification\_config, object\_detection\_config, object\_tracking\_config and event\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.ObjectTrackingConfig` event_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.EventConfig]): Configuration for video event task. One of video\_classification\_config, object\_detection\_config, object\_tracking\_config and event\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.EventConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "label_video" not in self._inner_api_calls:
    self._inner_api_calls[
        "label_video"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.label_video,
        default_retry=self._method_configs["LabelVideo"].retry,
        default_timeout=self._method_configs["LabelVideo"].timeout,
        client_info=self._client_info,
    )

# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(
    video_classification_config=video_classification_config,
    object_detection_config=object_detection_config,
    object_tracking_config=object_tracking_config,
    event_config=event_config,
)

request = data_labeling_service_pb2.LabelVideoRequest(
    parent=parent,
    basic_config=basic_config,
    feature=feature,
    video_classification_config=video_classification_config,
    object_detection_config=object_detection_config,
    object_tracking_config=object_tracking_config,
    event_config=event_config,
)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("parent", parent)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

operation = self._inner_api_calls["label_video"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    dataset_pb2.AnnotatedDataset,
    metadata_type=proto_operations_pb2.LabelOperationMetadata,
)
<SYSTEM_TASK:> Starts a labeling task for text. The type of text labeling task is <END_TASK> <USER_TASK:> Description: def label_text( self, parent, basic_config, feature, text_classification_config=None, text_entity_extraction_config=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Starts a labeling task for text. The type of text labeling task is configured by feature in the request. Example: >>> from google.cloud import datalabeling_v1beta1 >>> from google.cloud.datalabeling_v1beta1 import enums >>> >>> client = datalabeling_v1beta1.DataLabelingServiceClient() >>> >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') >>> >>> # TODO: Initialize `basic_config`: >>> basic_config = {} >>> >>> # TODO: Initialize `feature`: >>> feature = enums.LabelTextRequest.Feature.FEATURE_UNSPECIFIED >>> >>> response = client.label_text(parent, basic_config, feature) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. Name of the data set to request labeling task, format: projects/{project\_id}/datasets/{dataset\_id} basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig` feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of text labeling task. text_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.TextClassificationConfig]): Configuration for text classification task. One of text\_classification\_config and text\_entity\_extraction\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.TextClassificationConfig` text_entity_extraction_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.TextEntityExtractionConfig]): Configuration for entity extraction task. One of text\_classification\_config and text\_entity\_extraction\_config is required. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.TextEntityExtractionConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "label_text" not in self._inner_api_calls: self._inner_api_calls[ "label_text" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.label_text, default_retry=self._method_configs["LabelText"].retry, default_timeout=self._method_configs["LabelText"].timeout, client_info=self._client_info, ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( text_classification_config=text_classification_config, text_entity_extraction_config=text_entity_extraction_config, ) request = data_labeling_service_pb2.LabelTextRequest( parent=parent, basic_config=basic_config, feature=feature, text_classification_config=text_classification_config, text_entity_extraction_config=text_entity_extraction_config, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["label_text"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, dataset_pb2.AnnotatedDataset, metadata_type=proto_operations_pb2.LabelOperationMetadata, )
<SYSTEM_TASK:> Creates an instruction for how data should be labeled. <END_TASK> <USER_TASK:> Description: def create_instruction( self, parent, instruction, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates an instruction for how data should be labeled. Example: >>> from google.cloud import datalabeling_v1beta1 >>> >>> client = datalabeling_v1beta1.DataLabelingServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `instruction`: >>> instruction = {} >>> >>> response = client.create_instruction(parent, instruction) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. Instruction resource parent, format: projects/{project\_id} instruction (Union[dict, ~google.cloud.datalabeling_v1beta1.types.Instruction]): Required. Instruction of how to perform the labeling task. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datalabeling_v1beta1.types.Instruction` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "create_instruction" not in self._inner_api_calls: self._inner_api_calls[ "create_instruction" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instruction, default_retry=self._method_configs["CreateInstruction"].retry, default_timeout=self._method_configs["CreateInstruction"].timeout, client_info=self._client_info, ) request = data_labeling_service_pb2.CreateInstructionRequest( parent=parent, instruction=instruction ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["create_instruction"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, instruction_pb2.Instruction, metadata_type=proto_operations_pb2.CreateInstructionMetadata, )
<SYSTEM_TASK:> Value of the variable, as bytes. <END_TASK> <USER_TASK:> Description: def value(self): """Value of the variable, as bytes. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables :rtype: bytes or ``NoneType`` :returns: The value of the variable or ``None`` if the property is not set locally. """
value = self._properties.get("value") if value is not None: value = base64.b64decode(value) return value
<SYSTEM_TASK:> Retrieve the timestamp at which the variable was updated. <END_TASK> <USER_TASK:> Description: def update_time(self): """Retrieve the timestamp at which the variable was updated. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables Returns: :class:`~api_core.datetime_helpers.DatetimeWithNanoseconds`, :class:`datetime.datetime` or ``NoneType``: Datetime object parsed from RFC3339 valid timestamp, or ``None`` if the property is not set locally. Raises: ValueError: if value is not a valid RFC3339 timestamp """
value = self._properties.get("updateTime") if value is not None: try: value = datetime.datetime.strptime( value, datetime_helpers._RFC3339_MICROS ) except ValueError: DatetimeNS = datetime_helpers.DatetimeWithNanoseconds value = DatetimeNS.from_rfc3339(value) naive = value.tzinfo is None or value.tzinfo.utcoffset(value) is None if naive: value = pytz.utc.localize(value) return value
<SYSTEM_TASK:> Converts an ``Any`` protobuf to the specified message type. <END_TASK> <USER_TASK:> Description: def from_any_pb(pb_type, any_pb): """Converts an ``Any`` protobuf to the specified message type. Args: pb_type (type): the type of the message that any_pb stores an instance of. any_pb (google.protobuf.any_pb2.Any): the object to be converted. Returns: pb_type: An instance of the pb_type message. Raises: TypeError: if the message could not be converted. """
msg = pb_type() # Unwrap proto-plus wrapped messages. if callable(getattr(pb_type, "pb", None)): msg_pb = pb_type.pb(msg) else: msg_pb = msg # Unpack the Any object and populate the protobuf message instance. if not any_pb.Unpack(msg_pb): raise TypeError( "Could not convert {} to {}".format( any_pb.__class__.__name__, pb_type.__name__ ) ) # Done; return the message. return msg
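A quick round-trip sketch of the helper above. The ``Timestamp`` message is chosen purely for illustration; any generated protobuf type works the same way, and ``Any.Pack``/``Any.Unpack`` are the standard protobuf APIs:

from google.protobuf import any_pb2
from google.protobuf import timestamp_pb2

original = timestamp_pb2.Timestamp(seconds=1500000000)
any_pb = any_pb2.Any()
any_pb.Pack(original)  # wrap the concrete message in an Any

restored = from_any_pb(timestamp_pb2.Timestamp, any_pb)
assert restored == original  # the payload survives the round trip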
<SYSTEM_TASK:> Discovers all protobuf Message classes in a given import module. <END_TASK> <USER_TASK:> Description: def get_messages(module): """Discovers all protobuf Message classes in a given import module. Args: module (module): A Python module; :func:`dir` will be run against this module to find Message subclasses. Returns: dict[str, google.protobuf.message.Message]: A dictionary with the Message class names as keys, and the Message subclasses themselves as values. """
answer = collections.OrderedDict() for name in dir(module): candidate = getattr(module, name) if inspect.isclass(candidate) and issubclass(candidate, message.Message): answer[name] = candidate return answer
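A brief usage sketch; the generated module passed in here is illustrative only:

from google.protobuf import timestamp_pb2

messages = get_messages(timestamp_pb2)
# An OrderedDict keyed by class name, e.g. {'Timestamp': <class ...>}.
assert "Timestamp" in messages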
<SYSTEM_TASK:> Resolve a potentially nested key. <END_TASK> <USER_TASK:> Description: def _resolve_subkeys(key, separator="."): """Resolve a potentially nested key. If the key contains the ``separator`` (e.g. ``.``) then the key will be split on the first instance of the subkey:: >>> _resolve_subkeys('a.b.c') ('a', 'b.c') >>> _resolve_subkeys('d|e|f', separator='|') ('d', 'e|f') If not, the subkey will be :data:`None`:: >>> _resolve_subkeys('foo') ('foo', None) Args: key (str): A string that may or may not contain the separator. separator (str): The namespace separator. Defaults to `.`. Returns: Tuple[str, str]: The key and subkey(s). """
parts = key.split(separator, 1)
if len(parts) > 1:
    # Return a tuple, as documented, rather than the raw list from split().
    return parts[0], parts[1]
else:
    return parts[0], None
<SYSTEM_TASK:> Retrieve a key's value from a protobuf Message or dictionary. <END_TASK> <USER_TASK:> Description: def get(msg_or_dict, key, default=_SENTINEL): """Retrieve a key's value from a protobuf Message or dictionary. Args: msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key to retrieve from the object. default (Any): If the key is not present on the object, and a default is set, returns that default instead. A type-appropriate falsy default is generally recommended, as protobuf messages almost always have default values for unset values and it is not always possible to tell the difference between a falsy value and an unset one. If no default is set then :class:`KeyError` will be raised if the key is not present in the object. Returns: Any: The return value from the underlying Message or dict. Raises: KeyError: If the key is not found. Note that, for unset values, messages and dictionaries may not have consistent behavior. TypeError: If ``msg_or_dict`` is not a Message or Mapping. """
# We may need to get a nested key. Resolve this. key, subkey = _resolve_subkeys(key) # Attempt to get the value from the two types of objects we know about. # If we get something else, complain. if isinstance(msg_or_dict, message.Message): answer = getattr(msg_or_dict, key, default) elif isinstance(msg_or_dict, collections_abc.Mapping): answer = msg_or_dict.get(key, default) else: raise TypeError( "get() expected a dict or protobuf message, got {!r}.".format( type(msg_or_dict) ) ) # If the object we got back is our sentinel, raise KeyError; this is # a "not found" case. if answer is _SENTINEL: raise KeyError(key) # If a subkey exists, call this method recursively against the answer. if subkey is not None and answer is not default: return get(answer, subkey, default=default) return answer
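A minimal sketch of nested retrieval against a plain dict; the keys below are made up for illustration:

config = {"retry": {"initial_delay": 1.0}}
assert get(config, "retry.initial_delay") == 1.0
# When a (sub)key is missing, the supplied default is returned instead
# of raising KeyError.
assert get(config, "retry.max_delay", default=60.0) == 60.0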
<SYSTEM_TASK:> Set a key's value on a protobuf Message or dictionary. <END_TASK> <USER_TASK:> Description: def set(msg_or_dict, key, value): """Set a key's value on a protobuf Message or dictionary. Args: msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key to set. value (Any): The value to set. Raises: TypeError: If ``msg_or_dict`` is not a Message or dictionary. """
# Sanity check: Is our target object valid? if not isinstance(msg_or_dict, (collections_abc.MutableMapping, message.Message)): raise TypeError( "set() expected a dict or protobuf message, got {!r}.".format( type(msg_or_dict) ) ) # We may be setting a nested key. Resolve this. basekey, subkey = _resolve_subkeys(key) # If a subkey exists, then get that object and call this method # recursively against it using the subkey. if subkey is not None: if isinstance(msg_or_dict, collections_abc.MutableMapping): msg_or_dict.setdefault(basekey, {}) set(get(msg_or_dict, basekey), subkey, value) return if isinstance(msg_or_dict, collections_abc.MutableMapping): msg_or_dict[key] = value else: _set_field_on_message(msg_or_dict, key, value)
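A matching sketch for nested assignment; note that for mappings the intermediate dictionaries are created on demand via ``setdefault``:

settings = {}
set(settings, "retry.initial_delay", 1.0)
assert settings == {"retry": {"initial_delay": 1.0}}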
<SYSTEM_TASK:> Set the key on a protobuf Message or dictionary to a given value if the <END_TASK> <USER_TASK:> Description: def setdefault(msg_or_dict, key, value): """Set the key on a protobuf Message or dictionary to a given value if the current value is falsy. Because protobuf Messages do not distinguish between unset values and falsy ones particularly well (by design), this method treats any falsy value (e.g. 0, empty list) as a target to be overwritten, on both Messages and dictionaries. Args: msg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key on the object in question. value (Any): The value to set. Raises: TypeError: If ``msg_or_dict`` is not a Message or dictionary. """
if not get(msg_or_dict, key, default=None): set(msg_or_dict, key, value)
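An illustrative contrast with ``dict.setdefault``: because any falsy value is treated as unset, an existing ``0`` is overwritten:

options = {"page_size": 0}
setdefault(options, "page_size", 50)
assert options == {"page_size": 50}  # 0 counted as unset and replaced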
<SYSTEM_TASK:> Create a field mask by comparing two messages. <END_TASK> <USER_TASK:> Description: def field_mask(original, modified): """Create a field mask by comparing two messages. Args: original (~google.protobuf.message.Message): the original message. If set to None, this field will be interpreted as an empty message. modified (~google.protobuf.message.Message): the modified message. If set to None, this field will be interpreted as an empty message. Returns: google.protobuf.field_mask_pb2.FieldMask: field mask that contains the list of field names that have different values between the two messages. If the messages are equivalent, then the field mask is empty. Raises: ValueError: If the ``original`` or ``modified`` are not the same type. """
if original is None and modified is None: return field_mask_pb2.FieldMask() if original is None and modified is not None: original = copy.deepcopy(modified) original.Clear() if modified is None and original is not None: modified = copy.deepcopy(original) modified.Clear() if type(original) != type(modified): raise ValueError( "expected that both original and modified should be of the " 'same type, received "{!r}" and "{!r}".'.format( type(original), type(modified) ) ) return field_mask_pb2.FieldMask(paths=_field_mask_helper(original, modified))
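A small sketch comparing two messages of the same type. The exact paths come from ``_field_mask_helper`` (not shown here), but for a single scalar difference like this one the mask is expected to list just the changed field:

from google.protobuf import timestamp_pb2

before = timestamp_pb2.Timestamp(seconds=10)
after = timestamp_pb2.Timestamp(seconds=10, nanos=500)
mask = field_mask(before, after)
# mask.paths is expected to be ['nanos']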
<SYSTEM_TASK:> Return a fully-qualified topic string. <END_TASK> <USER_TASK:> Description: def topic_path(cls, project, topic): """Return a fully-qualified topic string."""
return google.api_core.path_template.expand( "projects/{project}/topics/{topic}", project=project, topic=topic )
<SYSTEM_TASK:> Creates a subscription to a given topic. See the resource name rules. If <END_TASK> <USER_TASK:> Description: def create_subscription( self, name, topic, push_config=None, ack_deadline_seconds=None, retain_acked_messages=None, message_retention_duration=None, labels=None, enable_message_ordering=None, expiration_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a subscription to a given topic. See the resource name rules. If the subscription already exists, returns ``ALREADY_EXISTS``. If the corresponding topic doesn't exist, returns ``NOT_FOUND``. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the `resource name format <https://cloud.google.com/pubsub/docs/admin#resource_names>`__. The generated name is populated in the returned Subscription object. Note that for REST API requests, you must specify a name in the request. Example: >>> from google.cloud import pubsub_v1 >>> >>> client = pubsub_v1.SubscriberClient() >>> >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') >>> >>> response = client.create_subscription(name, topic) Args: name (str): The name of the subscription. It must have the format `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must start with a letter, and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in length, and it must not start with `"goog"` topic (str): The name of the topic from which this subscription is receiving messages. Format is ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been deleted. push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to configure it. An empty ``pushConfig`` signifies that the subscriber will pull and ack messages using API methods. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it is considered to be outstanding. During that time period, the message will not be redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using non-streaming pull or send the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. The maximum custom deadline you can specify is 600 seconds (10 minutes). If this parameter is 0, a default value of 10 seconds is used. For push delivery, this value is also used to set the request timeout for the call to the push endpoint. If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message. retain_acked_messages (bool): Indicates whether to retain acknowledged messages. 
If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the ``message_retention_duration`` window. This must be true if you would like to Seek to a timestamp. BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes. BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Duration` labels (dict[str -> str]): See <a href="https://cloud.google.com/pubsub/docs/labels"> Creating and managing labels</a>. enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in ``PubsubMessage`` will be delivered to the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they may be delivered in any order. EXPERIMENTAL: This feature is part of a closed alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. If ``expiration_policy`` is not set, a *default policy* with ``ttl`` of 31 days will be used. The minimum allowed value for ``expiration_policy.ttl`` is 1 day. BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "create_subscription" not in self._inner_api_calls: self._inner_api_calls[ "create_subscription" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_subscription, default_retry=self._method_configs["CreateSubscription"].retry, default_timeout=self._method_configs["CreateSubscription"].timeout, client_info=self._client_info, ) request = pubsub_pb2.Subscription( name=name, topic=topic, push_config=push_config, ack_deadline_seconds=ack_deadline_seconds, retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration, labels=labels, enable_message_ordering=enable_message_ordering, expiration_policy=expiration_policy, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_subscription"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Completes the specified prefix with keyword suggestions. <END_TASK> <USER_TASK:> Description: def complete_query( self, name, query, page_size, language_codes=None, company_name=None, scope=None, type_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Completes the specified prefix with keyword suggestions. Intended for use by a job search auto-complete search box. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompletionClient() >>> >>> name = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `query`: >>> query = '' >>> >>> # TODO: Initialize `page_size`: >>> page_size = 0 >>> >>> response = client.complete_query(name, query, page_size) Args: name (str): Required. Resource name of project the completion is performed within. The format is "projects/{project\_id}", for example, "projects/api-test-project". query (str): Required. The query used to generate suggestions. The maximum number of allowed characters is 255. page_size (int): Required. Completion result count. The maximum allowed page size is 10. language_codes (list[str]): Optional. The list of languages of the query. This is the BCP-47 language code, such as "en-US" or "sr-Latn". For more information, see `Tags for Identifying Languages <https://tools.ietf.org/html/bcp47>`__. For ``CompletionType.JOB_TITLE`` type, only open jobs with the same ``language_codes`` are returned. For ``CompletionType.COMPANY_NAME`` type, only companies having open jobs with the same ``language_codes`` are returned. For ``CompletionType.COMBINED`` type, only open jobs with the same ``language_codes`` or companies having open jobs with the same ``language_codes`` are returned. The maximum number of allowed characters is 255. company_name (str): Optional. If provided, restricts completion to specified company. The format is "projects/{project\_id}/companies/{company\_id}", for example, "projects/api-test-project/companies/foo". scope (~google.cloud.talent_v4beta1.types.CompletionScope): Optional. The scope of the completion. The default is ``CompletionScope.PUBLIC``. type_ (~google.cloud.talent_v4beta1.types.CompletionType): Optional. The completion topic. The default is ``CompletionType.COMBINED``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.CompleteQueryResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "complete_query" not in self._inner_api_calls: self._inner_api_calls[ "complete_query" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.complete_query, default_retry=self._method_configs["CompleteQuery"].retry, default_timeout=self._method_configs["CompleteQuery"].timeout, client_info=self._client_info, ) request = completion_service_pb2.CompleteQueryRequest( name=name, query=query, page_size=page_size, language_codes=language_codes, company_name=company_name, scope=scope, type=type_, ) return self._inner_api_calls["complete_query"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Helper for concrete methods creating session instances. <END_TASK> <USER_TASK:> Description: def _new_session(self): """Helper for concrete methods creating session instances. :rtype: :class:`~google.cloud.spanner_v1.session.Session` :returns: new session instance. """
if self.labels: return self._database.session(labels=self.labels) return self._database.session()
<SYSTEM_TASK:> Delete all sessions in the pool. <END_TASK> <USER_TASK:> Description: def clear(self): """Delete all sessions in the pool."""
while True: try: session = self._sessions.get(block=False) except queue.Empty: break else: session.delete()
<SYSTEM_TASK:> Refresh maybe-expired sessions in the pool. <END_TASK> <USER_TASK:> Description: def ping(self): """Refresh maybe-expired sessions in the pool. This method is designed to be called from a background thread, or during the "idle" phase of an event loop. """
while True: try: ping_after, session = self._sessions.get(block=False) except queue.Empty: # all sessions in use break if ping_after > _NOW(): # oldest session is fresh # Re-add to queue with existing expiration self._sessions.put((ping_after, session)) break if not session.exists(): # stale session = self._new_session() session.create() # Re-add to queue with new expiration self.put(session)
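One plausible way to drive ``ping()`` from a background thread, as the docstring suggests. The ``pool`` variable and the 300-second interval are assumptions for this sketch, not part of the library:

import threading
import time

def _keep_sessions_fresh(pool, interval=300):
    # Periodically refresh maybe-expired sessions in the pool.
    while True:
        pool.ping()
        time.sleep(interval)

threading.Thread(
    target=_keep_sessions_fresh, args=(pool,), daemon=True
).start()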
<SYSTEM_TASK:> Begin all transactions for sessions added to the pool. <END_TASK> <USER_TASK:> Description: def begin_pending_transactions(self): """Begin all transactions for sessions added to the pool."""
while not self._pending_sessions.empty(): session = self._pending_sessions.get() session._transaction.begin() super(TransactionPingingPool, self).put(session)
<SYSTEM_TASK:> Attach a logging handler to the Python root logger <END_TASK> <USER_TASK:> Description: def setup_logging( handler, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO ): """Attach a logging handler to the Python root logger Excludes loggers that this library itself uses to avoid infinite recursion. :type handler: :class:`logging.handler` :param handler: the handler to attach to the global handler :type excluded_loggers: tuple :param excluded_loggers: (Optional) The loggers to not attach the handler to. This will always include the loggers in the path of the logging client itself. :type log_level: int :param log_level: (Optional) Python logging log level. Defaults to :const:`logging.INFO`. Example: .. code-block:: python import logging import google.cloud.logging from google.cloud.logging.handlers import CloudLoggingHandler client = google.cloud.logging.Client() handler = CloudLoggingHandler(client) google.cloud.logging.handlers.setup_logging(handler) logging.getLogger().setLevel(logging.DEBUG) logging.error('bad news') # API call """
all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS) logger = logging.getLogger() logger.setLevel(log_level) logger.addHandler(handler) logger.addHandler(logging.StreamHandler()) for logger_name in all_excluded_loggers: logger = logging.getLogger(logger_name) logger.propagate = False logger.addHandler(logging.StreamHandler())
<SYSTEM_TASK:> Format the message into JSON expected by fluentd. <END_TASK> <USER_TASK:> Description: def format(self, record): """Format the message into JSON expected by fluentd. :type record: :class:`~logging.LogRecord` :param record: the log record :rtype: str :returns: A JSON string formatted for GKE fluentd. """
message = super(ContainerEngineHandler, self).format(record) return format_stackdriver_json(record, message)
<SYSTEM_TASK:> Analyzes the sentiment of the provided text. <END_TASK> <USER_TASK:> Description: def analyze_sentiment( self, document, encoding_type=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Analyzes the sentiment of the provided text. Example: >>> from google.cloud import language_v1 >>> >>> client = language_v1.LanguageServiceClient() >>> >>> # TODO: Initialize `document`: >>> document = {} >>> >>> response = client.analyze_sentiment(document) Args: document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1.types.Document` encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate sentence offsets. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.language_v1.types.AnalyzeSentimentResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "analyze_sentiment" not in self._inner_api_calls: self._inner_api_calls[ "analyze_sentiment" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.analyze_sentiment, default_retry=self._method_configs["AnalyzeSentiment"].retry, default_timeout=self._method_configs["AnalyzeSentiment"].timeout, client_info=self._client_info, ) request = language_service_pb2.AnalyzeSentimentRequest( document=document, encoding_type=encoding_type ) return self._inner_api_calls["analyze_sentiment"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Sleep and produce a new sleep time. <END_TASK> <USER_TASK:> Description: def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER): """Sleep and produce a new sleep time. .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\ 2015/03/backoff.html Select a duration between zero and ``current_sleep``. It might seem counterintuitive to have so much jitter, but `Exponential Backoff And Jitter`_ argues that "full jitter" is the best strategy. Args: current_sleep (float): The current "max" for sleep interval. max_sleep (Optional[float]): Eventual "max" sleep time multiplier (Optional[float]): Multiplier for exponential backoff. Returns: float: Newly doubled ``current_sleep`` or ``max_sleep`` (whichever is smaller) """
actual_sleep = random.uniform(0.0, current_sleep) time.sleep(actual_sleep) return min(multiplier * current_sleep, max_sleep)
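A hedged sketch of the retry loop this helper is built for; ``call_with_backoff`` and its parameters are illustrative, not library API:

def call_with_backoff(fn, max_attempts=5):
    current_sleep = 1.0
    for attempt in range(max_attempts):
        try:
            return fn()
        except Exception:
            if attempt == max_attempts - 1:
                raise  # out of attempts; propagate the failure
            # Sleep a jittered amount, then double the cap (up to the max).
            current_sleep = _sleep(current_sleep)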
<SYSTEM_TASK:> Add ``Write`` protobufs to this transaction. <END_TASK> <USER_TASK:> Description: def _add_write_pbs(self, write_pbs): """Add ``Write`` protobufs to this transaction. Args: write_pbs (List[google.cloud.proto.firestore.v1beta1.\ write_pb2.Write]): A list of write protobufs to be added. Raises: ValueError: If this transaction is read-only. """
if self._read_only: raise ValueError(_WRITE_READ_ONLY) super(Transaction, self)._add_write_pbs(write_pbs)
<SYSTEM_TASK:> Begin the transaction. <END_TASK> <USER_TASK:> Description: def _begin(self, retry_id=None): """Begin the transaction. Args: retry_id (Optional[bytes]): Transaction ID of a transaction to be retried. Raises: ValueError: If the current transaction has already begun. """
if self.in_progress: msg = _CANT_BEGIN.format(self._id) raise ValueError(msg) transaction_response = self._client._firestore_api.begin_transaction( self._client._database_string, options_=self._options_protobuf(retry_id), metadata=self._client._rpc_metadata, ) self._id = transaction_response.transaction
<SYSTEM_TASK:> Roll back the transaction. <END_TASK> <USER_TASK:> Description: def _rollback(self): """Roll back the transaction. Raises: ValueError: If no transaction is in progress. """
if not self.in_progress: raise ValueError(_CANT_ROLLBACK) try: # NOTE: The response is just ``google.protobuf.Empty``. self._client._firestore_api.rollback( self._client._database_string, self._id, metadata=self._client._rpc_metadata, ) finally: self._clean_up()
<SYSTEM_TASK:> Transactionally commit the changes accumulated. <END_TASK> <USER_TASK:> Description: def _commit(self): """Transactionally commit the changes accumulated. Returns: List[google.cloud.proto.firestore.v1beta1.\ write_pb2.WriteResult, ...]: The write results corresponding to the changes committed, returned in the same order as the changes were applied to this transaction. A write result contains an ``update_time`` field. Raises: ValueError: If no transaction is in progress. """
if not self.in_progress: raise ValueError(_CANT_COMMIT) commit_response = _commit_with_retry(self._client, self._write_pbs, self._id) self._clean_up() return list(commit_response.write_results)
<SYSTEM_TASK:> Begin transaction and call the wrapped callable. <END_TASK> <USER_TASK:> Description: def _pre_commit(self, transaction, *args, **kwargs): """Begin transaction and call the wrapped callable. If the callable raises an exception, the transaction will be rolled back. If not, the transaction will be "ready" for ``Commit`` (i.e. it will have staged writes). Args: transaction (~.firestore_v1beta1.transaction.Transaction): A transaction to execute the callable within. args (Tuple[Any, ...]): The extra positional arguments to pass along to the wrapped callable. kwargs (Dict[str, Any]): The extra keyword arguments to pass along to the wrapped callable. Returns: Any: result of the wrapped callable. Raises: Exception: Any failure caused by ``to_wrap``. """
# Force the ``transaction`` to be not "in progress". transaction._clean_up() transaction._begin(retry_id=self.retry_id) # Update the stored transaction IDs. self.current_id = transaction._id if self.retry_id is None: self.retry_id = self.current_id try: return self.to_wrap(transaction, *args, **kwargs) except: # noqa # NOTE: If ``rollback`` fails this will lose the information # from the original failure. transaction._rollback() raise
<SYSTEM_TASK:> Try to commit the transaction. <END_TASK> <USER_TASK:> Description: def _maybe_commit(self, transaction): """Try to commit the transaction. If the transaction is read-write and the ``Commit`` fails with the ``ABORTED`` status code, it will be retried. Any other failure will not be caught. Args: transaction (~.firestore_v1beta1.transaction.Transaction): The transaction to be ``Commit``-ed. Returns: bool: Indicating if the commit succeeded. """
try: transaction._commit() return True except exceptions.GoogleAPICallError as exc: if transaction._read_only: raise if isinstance(exc, exceptions.Aborted): # If a read-write transaction returns ABORTED, retry. return False else: raise
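Roughly how ``_pre_commit`` and ``_maybe_commit`` compose into a retry loop. This is a simplified sketch of the transactional decorator's control flow, with ``MAX_ATTEMPTS`` standing in for the real attempt bound:

def __call__(self, transaction, *args, **kwargs):
    for _ in range(MAX_ATTEMPTS):  # illustrative constant
        result = self._pre_commit(transaction, *args, **kwargs)
        if self._maybe_commit(transaction):
            return result
        # Commit returned ABORTED on a read-write transaction: retry.
    transaction._rollback()
    raise ValueError("Too many retries.")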
<SYSTEM_TASK:> Creates and returns a new profile. <END_TASK> <USER_TASK:> Description: def create_profile( self, parent, profile, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates and returns a new profile. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ProfileServiceClient() >>> >>> parent = client.tenant_path('[PROJECT]', '[TENANT]') >>> >>> # TODO: Initialize `profile`: >>> profile = {} >>> >>> response = client.create_profile(parent, profile) Args: parent (str): Required. The name of the tenant this profile belongs to. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, "projects/api-test-project/tenants/foo". profile (Union[dict, ~google.cloud.talent_v4beta1.types.Profile]): Required. The profile to be created. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Profile` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Profile` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "create_profile" not in self._inner_api_calls: self._inner_api_calls[ "create_profile" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_profile, default_retry=self._method_configs["CreateProfile"].retry, default_timeout=self._method_configs["CreateProfile"].timeout, client_info=self._client_info, ) request = profile_service_pb2.CreateProfileRequest( parent=parent, profile=profile ) return self._inner_api_calls["create_profile"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Gets the specified profile. <END_TASK> <USER_TASK:> Description: def get_profile( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets the specified profile. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ProfileServiceClient() >>> >>> name = client.profile_path('[PROJECT]', '[TENANT]', '[PROFILE]') >>> >>> response = client.get_profile(name) Args: name (str): Required. Resource name of the profile to get. The format is "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}", for example, "projects/api-test-project/tenants/foo/profiles/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Profile` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "get_profile" not in self._inner_api_calls: self._inner_api_calls[ "get_profile" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_profile, default_retry=self._method_configs["GetProfile"].retry, default_timeout=self._method_configs["GetProfile"].timeout, client_info=self._client_info, ) request = profile_service_pb2.GetProfileRequest(name=name) return self._inner_api_calls["get_profile"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Updates the specified profile and returns the updated result. <END_TASK> <USER_TASK:> Description: def update_profile( self, profile, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates the specified profile and returns the updated result. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ProfileServiceClient() >>> >>> # TODO: Initialize `profile`: >>> profile = {} >>> >>> response = client.update_profile(profile) Args: profile (Union[dict, ~google.cloud.talent_v4beta1.types.Profile]): Required. Profile to be updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Profile` update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional. A field mask to specify the profile fields to update. A full update is performed if it is unset. Valid values are: - externalId - source - uri - isHirable - createTime - updateTime - resumeHrxml - personNames - addresses - emailAddresses - phoneNumbers - personalUris - additionalContactInfo - employmentRecords - educationRecords - skills - projects - publications - patents - certifications - jobApplications - recruitingNotes - customAttributes If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Profile` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "update_profile" not in self._inner_api_calls: self._inner_api_calls[ "update_profile" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_profile, default_retry=self._method_configs["UpdateProfile"].retry, default_timeout=self._method_configs["UpdateProfile"].timeout, client_info=self._client_info, ) request = profile_service_pb2.UpdateProfileRequest( profile=profile, update_mask=update_mask ) return self._inner_api_calls["update_profile"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Searches for profiles within a tenant. <END_TASK> <USER_TASK:> Description: def search_profiles( self, parent, request_metadata, profile_query=None, page_size=None, offset=None, disable_spell_check=None, order_by=None, case_sensitive_sort=None, histogram_queries=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Searches for profiles within a tenant. For example, search by raw queries "software engineer in Mountain View" or search by structured filters (location filter, education filter, etc.). See ``SearchProfilesRequest`` for more information. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ProfileServiceClient() >>> >>> parent = client.tenant_path('[PROJECT]', '[TENANT]') >>> >>> # TODO: Initialize `request_metadata`: >>> request_metadata = {} >>> >>> # Iterate over all results >>> for element in client.search_profiles(parent, request_metadata): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.search_profiles(parent, request_metadata).pages: ... for element in page: ... # process element ... pass Args: parent (str): Required. The resource name of the tenant to search within. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, "projects/api-test-project/tenants/foo". request_metadata (Union[dict, ~google.cloud.talent_v4beta1.types.RequestMetadata]): Required. The meta information collected about the profile search user. This is used to improve the search quality of the service. These values are provided by users, and must be precise and consistent. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.RequestMetadata` profile_query (Union[dict, ~google.cloud.talent_v4beta1.types.ProfileQuery]): Optional. Search query to execute. See ``ProfileQuery`` for more details. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.ProfileQuery` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per-resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. offset (int): Optional. An integer that specifies the current offset (that is, starting result) in search results. This field is only considered if ``page_token`` is unset. The maximum allowed value is 5000. Otherwise an error is thrown. For example, 0 means to search from the first profile, and 10 means to search from the 11th profile. This can be used for pagination, for example pageSize = 10 and offset = 10 means to search from the second page. disable_spell_check (bool): Optional. This flag controls the spell-check feature. If ``false``, the service attempts to correct a misspelled query. For example, "enginee" is corrected to "engineer". order_by (str): Optional. The criteria that determines how search results are sorted. Default is "relevance desc" if no value is specified. Supported options are: - "relevance desc": By descending relevance, as determined by the API algorithms. - "update\_date desc": Sort by ``Profile.update_date`` in descending order (recently updated profiles first). - "create\_date desc": Sort by ``Profile.create_date`` in descending order (recently created profiles first). 
- "first\_name": Sort by ``PersonStrcuturedName.given_name`` in ascending order. - "first\_name desc": Sort by ``PersonStrcuturedName.given_name`` in descending order. - "last\_name": Sort by ``PersonStrcuturedName.family_name`` in ascending order. - "last\_name desc": Sort by ``PersonStrcuturedName.family_name`` in ascending order. case_sensitive_sort (bool): Optional. When sort by field is based on alphabetical order, sort values case sensitively (based on ASCII) when the value is set to true. Default value is case in-sensitive sort (false). histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): Optional. A list of expressions specifies histogram requests against matching profiles for ``SearchProfilesRequest``. The expression syntax looks like a function definition with optional parameters. Function syntax: function\_name(histogram\_facet[, list of buckets]) Data types: - Histogram facet: facet names with format [a-zA-Z][a-zA-Z0-9\_]+. - String: string like "any string with backslash escape for quote(")." - Number: whole number and floating point number like 10, -1 and -0.01. - List: list of elements with comma(,) separator surrounded by square brackets. For example, [1, 2, 3] and ["one", "two", "three"]. Built-in constants: - MIN (minimum number similar to java Double.MIN\_VALUE) - MAX (maximum number similar to java Double.MAX\_VALUE) Built-in functions: - bucket(start, end[, label]) Bucket build-in function creates a bucket with range of \`start, end). Note that the end is exclusive. For example, bucket(1, MAX, "positive number") or bucket(1, 10). Histogram Facets: - admin1: Admin1 is a global placeholder for referring to state, province, or the particular term a country uses to define the geographic structure below the country level. Examples include states codes such as "CA", "IL", "NY", and provinces, such as "BC". - locality: Locality is a global placeholder for referring to city, town, or the particular term a country uses to define the geographic structure below the admin1 level. Examples include city names such as "Mountain View" and "New York". - extended\_locality: Extended locality is concatenated version of admin1 and locality with comma separator. For example, "Mountain View, CA" and "New York, NY". - postal\_code: Postal code of profile which follows locale code. - country: Country code (ISO-3166-1 alpha-2 code) of profile, such as US, JP, GB. - job\_title: Normalized job titles specified in EmploymentHistory. - company\_name: Normalized company name of profiles to match on. - institution: The school name. For example, "MIT", "University of California, Berkeley" - degree: Highest education degree in ISCED code. Each value in degree covers specific level of education, without any expansion to upper nor lower levels of education degree. - experience\_in\_months: experience in months. 0 means 0 month to 1 month (exclusive). - application\_date: The application date specifies application start dates. See [ApplicationDateFilter\` for more details. - application\_outcome\_reason: The application outcome reason specifies the outcome reasons of job application. See ``ApplicationOutcomeReasonFilter`` for more details. - application\_last\_stage: The application last stage specifies the last stage of job application. See ``ApplicationLastStageFilter`` for more details. - application\_job\_title: The application job title specifies the job applied for in the application. See ``ApplicationJobFilter`` for more details. 
- application\_status: The application status specifies the status of job application. See ``ApplicationStatusFilter`` for more details. - hirable\_status: Hirable status specifies the profile's hirable status. - string\_custom\_attribute: String custom attributes. Values can be accessed via square bracket notation like string\_custom\_attribute["key1"]. - numeric\_custom\_attribute: Numeric custom attributes. Values can be accessed via square bracket notation like numeric\_custom\_attribute["key1"]. Example expressions: - count(admin1) - count(experience\_in\_months, [bucket(0, 12, "1 year"), bucket(12, 36, "1-3 years"), bucket(36, MAX, "3+ years")]) - count(string\_custom\_attribute["assigned\_recruiter"]) - count(numeric\_custom\_attribute["favorite\_number"], [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative")]) If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.HistogramQuery` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.talent_v4beta1.types.HistogramQueryResult` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "search_profiles" not in self._inner_api_calls: self._inner_api_calls[ "search_profiles" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.search_profiles, default_retry=self._method_configs["SearchProfiles"].retry, default_timeout=self._method_configs["SearchProfiles"].timeout, client_info=self._client_info, ) request = profile_service_pb2.SearchProfilesRequest( parent=parent, request_metadata=request_metadata, profile_query=profile_query, page_size=page_size, offset=offset, disable_spell_check=disable_spell_check, order_by=order_by, case_sensitive_sort=case_sensitive_sort, histogram_queries=histogram_queries, ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["search_profiles"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="histogram_query_results", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
<SYSTEM_TASK:> Factory to retrieve JSON credentials while creating client. <END_TASK> <USER_TASK:> Description: def from_service_account_json(cls, json_credentials_path, *args, **kwargs): """Factory to retrieve JSON credentials while creating client. :type json_credentials_path: str :param json_credentials_path: The path to a private key file (this file was given to you when you created the service account). This file must contain a JSON object with a private key and other credentials information (downloaded from the Google APIs console). :type args: tuple :param args: Remaining positional arguments to pass to constructor. :type kwargs: dict :param kwargs: Remaining keyword arguments to pass to constructor. :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. :raises TypeError: if there is a conflict with the kwargs and the credentials created by the factory. """
if "credentials" in kwargs: raise TypeError("credentials must not be in keyword arguments") with io.open(json_credentials_path, "r", encoding="utf-8") as json_fi: credentials_info = json.load(json_fi) credentials = service_account.Credentials.from_service_account_info( credentials_info ) if cls._SET_PROJECT: if "project" not in kwargs: kwargs["project"] = credentials_info.get("project_id") kwargs["credentials"] = credentials return cls(*args, **kwargs)
<SYSTEM_TASK:> Return a fully-qualified alert_policy string. <END_TASK> <USER_TASK:> Description: def alert_policy_path(cls, project, alert_policy): """Return a fully-qualified alert_policy string."""
return google.api_core.path_template.expand( "projects/{project}/alertPolicies/{alert_policy}", project=project, alert_policy=alert_policy, )
<SYSTEM_TASK:> Return a fully-qualified alert_policy_condition string. <END_TASK> <USER_TASK:> Description: def alert_policy_condition_path(cls, project, alert_policy, condition): """Return a fully-qualified alert_policy_condition string."""
return google.api_core.path_template.expand( "projects/{project}/alertPolicies/{alert_policy}/conditions/{condition}", project=project, alert_policy=alert_policy, condition=condition, )
<SYSTEM_TASK:> Maps BigQuery error reasons to an exception. <END_TASK> <USER_TASK:> Description: def _error_result_to_exception(error_result): """Maps BigQuery error reasons to an exception. The reasons and their matching HTTP status codes are documented on the `troubleshooting errors`_ page. .. _troubleshooting errors: https://cloud.google.com/bigquery\ /troubleshooting-errors :type error_result: Mapping[str, str] :param error_result: The error result from BigQuery. :rtype google.cloud.exceptions.GoogleCloudError: :returns: The mapped exception. """
reason = error_result.get("reason") status_code = _ERROR_REASON_TO_EXCEPTION.get( reason, http_client.INTERNAL_SERVER_ERROR ) return exceptions.from_http_status( status_code, error_result.get("message", ""), errors=[error_result] )
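An illustrative call; the payload below is a made-up example of the error result shape BigQuery returns:

error = {"reason": "notFound", "message": "Table not found"}
exc = _error_result_to_exception(error)
# Expected to map to google.api_core.exceptions.NotFound (HTTP 404);
# unknown reasons fall back to a 500-based internal server error.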
<SYSTEM_TASK:> Datetime at which the job was created. <END_TASK> <USER_TASK:> Description: def created(self): """Datetime at which the job was created. :rtype: ``datetime.datetime``, or ``NoneType`` :returns: the creation time (None until set from the server). """
statistics = self._properties.get("statistics") if statistics is not None: millis = statistics.get("creationTime") if millis is not None: return _helpers._datetime_from_microseconds(millis * 1000.0)
<SYSTEM_TASK:> Datetime at which the job was started. <END_TASK> <USER_TASK:> Description: def started(self): """Datetime at which the job was started. :rtype: ``datetime.datetime``, or ``NoneType`` :returns: the start time (None until set from the server). """
statistics = self._properties.get("statistics") if statistics is not None: millis = statistics.get("startTime") if millis is not None: return _helpers._datetime_from_microseconds(millis * 1000.0)
<SYSTEM_TASK:> Datetime at which the job finished. <END_TASK> <USER_TASK:> Description: def ended(self): """Datetime at which the job finished. :rtype: ``datetime.datetime``, or ``NoneType`` :returns: the end time (None until set from the server). """
statistics = self._properties.get("statistics")
if statistics is not None:
    millis = statistics.get("endTime")
    if millis is not None:
        return _helpers._datetime_from_microseconds(millis * 1000.0)
<SYSTEM_TASK:> Helper for job-type specific statistics-based properties. <END_TASK> <USER_TASK:> Description: def _job_statistics(self): """Helper for job-type specific statistics-based properties."""
statistics = self._properties.get("statistics", {})
return statistics.get(self._JOB_TYPE, {})
<SYSTEM_TASK:> Set the result or exception from the job if it is complete. <END_TASK> <USER_TASK:> Description: def _set_future_result(self): """Set the result or exception from the job if it is complete."""
# This must be done in a lock to prevent the polling thread
# and main thread from both executing the completion logic
# at the same time.
with self._completion_lock:
    # If the operation isn't complete or if the result has already been
    # set, do not call set_result/set_exception again.
    # Note: self._result_set is set to True in set_result and
    # set_exception, in case those methods are invoked directly.
    if self.state != _DONE_STATE or self._result_set:
        return

    if self.error_result is not None:
        exception = _error_result_to_exception(self.error_result)
        self.set_exception(exception)
    else:
        self.set_result(self)
<SYSTEM_TASK:> Merge this job config with a default job config. <END_TASK> <USER_TASK:> Description: def _fill_from_default(self, default_job_config): """Merge this job config with a default job config. The keys in this object take precedence over the keys in the default config. The merge is done at the top-level as well as for keys one level below the job type. Arguments: default_job_config (google.cloud.bigquery.job._JobConfig): The default job config that will be used to fill in self. Returns: google.cloud.bigquery.job._JobConfig: A new (merged) job config. """
if self._job_type != default_job_config._job_type:
    raise TypeError(
        "attempted to merge two incompatible job types: "
        + repr(self._job_type)
        + ", "
        + repr(default_job_config._job_type)
    )

new_job_config = self.__class__()

default_job_properties = copy.deepcopy(default_job_config._properties)
for key in self._properties:
    if key != self._job_type:
        default_job_properties[key] = self._properties[key]

default_job_properties[self._job_type].update(self._properties[self._job_type])
new_job_config._properties = default_job_properties

return new_job_config
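A hedged usage sketch of the merge semantics, assuming the public ``QueryJobConfig`` subclass: keys set on the per-job config win, everything else falls back to the default:

from google.cloud import bigquery

default_config = bigquery.QueryJobConfig()
default_config.use_legacy_sql = False

job_config = bigquery.QueryJobConfig()
job_config.maximum_bytes_billed = 10 ** 9

# The per-job config's keys take precedence; unset keys fall back
# to the default config.
merged = job_config._fill_from_default(default_config)
assert merged.use_legacy_sql is False
assert merged.maximum_bytes_billed == 10 ** 9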
<SYSTEM_TASK:> Return file counts from job statistics, if present. <END_TASK> <USER_TASK:> Description: def destination_uri_file_counts(self): """Return file counts from job statistics, if present. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.extract.destinationUriFileCounts Returns: a list of integer counts, each representing the number of files per destination URI or URI pattern specified in the extract configuration. These values will be in the same order as the URIs specified in the 'destinationUris' field. Returns None if job is not yet complete. """
counts = self._job_statistics().get("destinationUriFileCounts")
if counts is not None:
    return [int(count) for count in counts]
return None
<SYSTEM_TASK:> Build an API representation of the query job config. <END_TASK> <USER_TASK:> Description: def to_api_repr(self): """Build an API representation of the query job config. Returns: dict: A dictionary in the format used by the BigQuery API. """
resource = copy.deepcopy(self._properties)

# Query parameters have an additional property associated with them
# to indicate if the query is using named or positional parameters.
query_parameters = resource["query"].get("queryParameters")
if query_parameters:
    if query_parameters[0].get("name") is None:
        resource["query"]["parameterMode"] = "POSITIONAL"
    else:
        resource["query"]["parameterMode"] = "NAMED"

return resource
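A small sketch of the parameter-mode detection above, assuming ``QueryJobConfig`` and ``ScalarQueryParameter`` from the public package (a ``None`` name marks a positional parameter):

from google.cloud import bigquery

config = bigquery.QueryJobConfig()
config.query_parameters = [
    bigquery.ScalarQueryParameter("min_word_count", "INT64", 250)
]
assert config.to_api_repr()["query"]["parameterMode"] == "NAMED"

config.query_parameters = [bigquery.ScalarQueryParameter(None, "INT64", 250)]
assert config.to_api_repr()["query"]["parameterMode"] == "POSITIONAL"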
<SYSTEM_TASK:> Return query plan from job statistics, if present. <END_TASK> <USER_TASK:> Description: def query_plan(self): """Return query plan from job statistics, if present. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.queryPlan :rtype: list of :class:`QueryPlanEntry` :returns: mappings describing the query plan, or an empty list if the query has not yet completed. """
plan_entries = self._job_statistics().get("queryPlan", ())
return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries]
<SYSTEM_TASK:> Return total bytes processed from job statistics, if present. <END_TASK> <USER_TASK:> Description: def total_bytes_processed(self): """Return total bytes processed from job statistics, if present. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesProcessed :rtype: int or None :returns: total bytes processed by the job, or None if job is not yet complete. """
result = self._job_statistics().get("totalBytesProcessed") if result is not None: result = int(result) return result
<SYSTEM_TASK:> Return total bytes billed from job statistics, if present. <END_TASK> <USER_TASK:> Description: def total_bytes_billed(self): """Return total bytes billed from job statistics, if present. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesBilled :rtype: int or None :returns: total bytes billed by the job, or None if job is not yet complete. """
result = self._job_statistics().get("totalBytesBilled")
if result is not None:
    result = int(result)
return result
<SYSTEM_TASK:> Return the number of DML rows affected by the job. <END_TASK> <USER_TASK:> Description: def num_dml_affected_rows(self): """Return the number of DML rows affected by the job. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.numDmlAffectedRows :rtype: int or None :returns: number of DML rows affected by the job, or None if job is not yet complete. """
result = self._job_statistics().get("numDmlAffectedRows")
if result is not None:
    result = int(result)
return result
<SYSTEM_TASK:> Return referenced tables from job statistics, if present. <END_TASK> <USER_TASK:> Description: def referenced_tables(self): """Return referenced tables from job statistics, if present. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables :rtype: list of :class:`~google.cloud.bigquery.table.TableReference` :returns: references to tables consumed by the query, or an empty list if the query has not yet completed. """
tables = []
datasets_by_project_name = {}

for table in self._job_statistics().get("referencedTables", ()):
    t_project = table["projectId"]

    ds_id = table["datasetId"]
    t_dataset = datasets_by_project_name.get((t_project, ds_id))
    if t_dataset is None:
        t_dataset = DatasetReference(t_project, ds_id)
        datasets_by_project_name[(t_project, ds_id)] = t_dataset

    t_name = table["tableId"]
    tables.append(t_dataset.table(t_name))

return tables
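A usage sketch, assuming ``query_job`` is a completed query job:

# Inspect which tables a finished query touched.
for ref in query_job.referenced_tables:
    print(ref.project, ref.dataset_id, ref.table_id)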
<SYSTEM_TASK:> Return undeclared query parameters from job statistics, if present. <END_TASK> <USER_TASK:> Description: def undeclared_query_parameters(self): """Return undeclared query parameters from job statistics, if present. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.undeclaredQueryParameters :rtype: list of :class:`~google.cloud.bigquery.ArrayQueryParameter`, :class:`~google.cloud.bigquery.ScalarQueryParameter`, or :class:`~google.cloud.bigquery.StructQueryParameter` :returns: undeclared parameters, or an empty list if the query has not yet completed. """
parameters = []
undeclared = self._job_statistics().get("undeclaredQueryParameters", ())

for parameter in undeclared:
    p_type = parameter["parameterType"]

    if "arrayType" in p_type:
        klass = ArrayQueryParameter
    elif "structTypes" in p_type:
        klass = StructQueryParameter
    else:
        klass = ScalarQueryParameter

    parameters.append(klass.from_api_repr(parameter))

return parameters
<SYSTEM_TASK:> Return the estimated number of bytes processed by the query. <END_TASK> <USER_TASK:> Description: def estimated_bytes_processed(self): """Return the estimated number of bytes processed by the query. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.estimatedBytesProcessed :rtype: int or None :returns: the estimated number of bytes processed by the query, or None if job is not yet complete. """
result = self._job_statistics().get("estimatedBytesProcessed")
if result is not None:
    result = int(result)
return result
<SYSTEM_TASK:> Return a pandas DataFrame from a QueryJob <END_TASK> <USER_TASK:> Description: def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=None): """Return a pandas DataFrame from a QueryJob Args: bqstorage_client ( \ google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ ): **Alpha Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. This method requires the ``fastavro`` and ``google-cloud-bigquery-storage`` libraries. Reading from a specific partition or snapshot is not currently supported by this method. **Caution**: There is a known issue reading small anonymous query result tables with the BQ Storage API. Write your query results to a destination table to work around this issue. dtypes ( \ Map[str, Union[str, pandas.Series.dtype]] \ ): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior is used. progress_bar_type (Optional[str]): If set, use the `tqdm <https://tqdm.github.io/>`_ library to display a progress bar while the data downloads. Install the ``tqdm`` package to use this feature. See :func:`~google.cloud.bigquery.table.RowIterator.to_dataframe` for details. ..versionadded:: 1.11.0 Returns: A :class:`~pandas.DataFrame` populated with row data and column headers from the query results. The column headers are derived from the destination table's schema. Raises: ValueError: If the `pandas` library cannot be imported. """
return self.result().to_dataframe(
    bqstorage_client=bqstorage_client,
    dtypes=dtypes,
    progress_bar_type=progress_bar_type,
)
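A minimal usage sketch; requires the ``pandas`` library, and the public-dataset query is only an example:

from google.cloud import bigquery

client = bigquery.Client()
query_job = client.query(
    "SELECT name, SUM(number) AS total "
    "FROM `bigquery-public-data.usa_names.usa_1910_2013` "
    "GROUP BY name ORDER BY total DESC LIMIT 10"
)
# Blocks until the query finishes, then downloads the rows.
# Pass progress_bar_type="tqdm" only if tqdm is installed.
df = query_job.to_dataframe()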
<SYSTEM_TASK:> Construct an UnknownJob from the JSON representation. <END_TASK> <USER_TASK:> Description: def from_api_repr(cls, resource, client): """Construct an UnknownJob from the JSON representation. Args: resource (dict): JSON representation of a job. client (google.cloud.bigquery.client.Client): Client connected to BigQuery API. Returns: UnknownJob: Job corresponding to the resource. """
job_ref_properties = resource.get("jobReference", {"projectId": client.project})
job_ref = _JobReference._from_api_repr(job_ref_properties)
job = cls(job_ref, client)

# Populate the job reference with the project, even if it has been
# redacted, because we know it should equal that of the request.
resource["jobReference"] = job_ref_properties
job._properties = resource

return job
<SYSTEM_TASK:> Update description of the zone. <END_TASK> <USER_TASK:> Description: def description(self, value): """Update description of the zone. :type value: str :param value: (Optional) new description :raises: ValueError for invalid value types. """
if not isinstance(value, six.string_types) and value is not None:
    raise ValueError("Pass a string, or None")
self._properties["description"] = value
<SYSTEM_TASK:> Update named set of DNS name servers. <END_TASK> <USER_TASK:> Description: def name_server_set(self, value): """Update named set of DNS name servers. :type value: str :param value: (Optional) new name server set :raises: ValueError for invalid value types. """
if not isinstance(value, six.string_types) and value is not None:
    raise ValueError("Pass a string, or None")
self._properties["nameServerSet"] = value
<SYSTEM_TASK:> Construct a resource record set bound to this zone. <END_TASK> <USER_TASK:> Description: def resource_record_set(self, name, record_type, ttl, rrdatas): """Construct a resource record set bound to this zone. :type name: str :param name: Name of the record set. :type record_type: str :param record_type: RR type :type ttl: int :param ttl: TTL for the RR, in seconds :type rrdatas: list of string :param rrdatas: resource data for the RR :rtype: :class:`google.cloud.dns.resource_record_set.ResourceRecordSet` :returns: a new ``ResourceRecordSet`` instance """
return ResourceRecordSet(name, record_type, ttl, rrdatas, zone=self)
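A usage sketch, with placeholder zone and DNS values:

from google.cloud import dns

client = dns.Client()
zone = client.zone("example-zone", "example.com.")
record_set = zone.resource_record_set(
    "www.example.com.", "A", 3600, ["203.0.113.10"]
)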
<SYSTEM_TASK:> List resource record sets for this zone. <END_TASK> <USER_TASK:> Description: def list_resource_record_sets(self, max_results=None, page_token=None, client=None): """List resource record sets for this zone. See https://cloud.google.com/dns/api/v1/resourceRecordSets/list :type max_results: int :param max_results: Optional. The maximum number of resource record sets to return. Defaults to a sensible value set by the API. :type page_token: str :param page_token: Optional. If present, return the next batch of resource record sets, using the value, which must correspond to the ``nextPageToken`` value returned in the previous response. Deprecated: use the ``pages`` property of the returned iterator instead of manually passing the token. :type client: :class:`google.cloud.dns.client.Client` :param client: (Optional) the client to use. If not passed, falls back to the ``client`` stored on the current zone. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~.resource_record_set.ResourceRecordSet` belonging to this zone. """
client = self._require_client(client)
path = "/projects/%s/managedZones/%s/rrsets" % (self.project, self.name)
iterator = page_iterator.HTTPIterator(
    client=client,
    api_request=client._connection.api_request,
    path=path,
    item_to_value=_item_to_resource_record_set,
    items_key="rrsets",
    page_token=page_token,
    max_results=max_results,
)
iterator.zone = self
return iterator
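Continuing the zone sketch above, the returned iterator pages through results transparently:

# `zone` is the ManagedZone from the earlier sketch.
for record_set in zone.list_resource_record_sets():
    print(record_set.name, record_set.record_type, record_set.ttl)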
<SYSTEM_TASK:> Run image detection and annotation for an image. <END_TASK> <USER_TASK:> Description: def annotate_image(self, request, retry=None, timeout=None): """Run image detection and annotation for an image. Example: >>> from google.cloud.vision_v1 import ImageAnnotatorClient >>> client = ImageAnnotatorClient() >>> request = { ... 'image': { ... 'source': {'image_uri': 'https://foo.com/image.jpg'}, ... }, ... } >>> response = client.annotate_image(request) Args: request (:class:`~.vision_v1.types.AnnotateImageRequest`) retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Returns: :class:`~.vision_v1.types.AnnotateImageResponse` The API response. """
# If the image is a file handler, set the content.
image = protobuf.get(request, "image")
if hasattr(image, "read"):
    img_bytes = image.read()
    protobuf.set(request, "image", {})
    protobuf.set(request, "image.content", img_bytes)
    image = protobuf.get(request, "image")

# If a filename is provided, read the file.
filename = protobuf.get(image, "source.filename", default=None)
if filename:
    with io.open(filename, "rb") as img_file:
        protobuf.set(request, "image.content", img_file.read())
    protobuf.set(request, "image.source", None)

# This method allows features not to be specified, and you get all
# of them.
protobuf.setdefault(request, "features", self._get_all_features())
r = self.batch_annotate_images([request], retry=retry, timeout=timeout)
return r.responses[0]
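A hedged usage sketch of both input styles the helper supports; the URI and local path are placeholders:

import io

from google.cloud import vision

client = vision.ImageAnnotatorClient()

# Remote image referenced by URI.
response = client.annotate_image(
    {"image": {"source": {"image_uri": "https://example.com/image.jpg"}}}
)

# Local image passed as an open file handle; the helper reads the
# handle and inlines the bytes into the request.
with io.open("/path/to/local-image.jpg", "rb") as image_file:
    response = client.annotate_image({"image": image_file})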
<SYSTEM_TASK:> Creates a new job. <END_TASK> <USER_TASK:> Description: def create_job( self, parent, job, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new job. Typically, the job becomes searchable within 10 seconds, but it may take up to 5 minutes. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.JobServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `job`: >>> job = {} >>> >>> response = client.create_job(parent, job) Args: parent (str): Required. The resource name of the project under which the job is created. The format is "projects/{project\_id}", for example, "projects/api-test-project". job (Union[dict, ~google.cloud.talent_v4beta1.types.Job]): Required. The Job to be created. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Job` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Job` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "create_job" not in self._inner_api_calls:
    self._inner_api_calls[
        "create_job"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.create_job,
        default_retry=self._method_configs["CreateJob"].retry,
        default_timeout=self._method_configs["CreateJob"].timeout,
        client_info=self._client_info,
    )

request = job_service_pb2.CreateJobRequest(parent=parent, job=job)
return self._inner_api_calls["create_job"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Retrieves the specified job, whose status is OPEN or recently EXPIRED <END_TASK> <USER_TASK:> Description: def get_job( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Retrieves the specified job, whose status is OPEN or recently EXPIRED within the last 90 days. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.JobServiceClient() >>> >>> name = client.job_path('[PROJECT]', '[JOBS]') >>> >>> response = client.get_job(name) Args: name (str): Required. The resource name of the job to retrieve. The format is "projects/{project\_id}/jobs/{job\_id}", for example, "projects/api-test-project/jobs/1234". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Job` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "get_job" not in self._inner_api_calls:
    self._inner_api_calls[
        "get_job"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.get_job,
        default_retry=self._method_configs["GetJob"].retry,
        default_timeout=self._method_configs["GetJob"].timeout,
        client_info=self._client_info,
    )

request = job_service_pb2.GetJobRequest(name=name)
return self._inner_api_calls["get_job"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Deletes a list of ``Job``\ s by filter. <END_TASK> <USER_TASK:> Description: def batch_delete_jobs( self, parent, filter_, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes a list of ``Job``\ s by filter. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.JobServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `filter_`: >>> filter_ = '' >>> >>> client.batch_delete_jobs(parent, filter_) Args: parent (str): Required. The resource name of the project under which the job is created. The format is "projects/{project\_id}", for example, "projects/api-test-project". filter_ (str): Required. The filter string specifies the jobs to be deleted. Supported operator: =, AND The fields eligible for filtering are: - ``companyName`` (Required) - ``requisitionId`` (Required) Sample Query: companyName = "projects/api-test-project/companies/123" AND requisitionId = "req-1" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "batch_delete_jobs" not in self._inner_api_calls:
    self._inner_api_calls[
        "batch_delete_jobs"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.batch_delete_jobs,
        default_retry=self._method_configs["BatchDeleteJobs"].retry,
        default_timeout=self._method_configs["BatchDeleteJobs"].timeout,
        client_info=self._client_info,
    )

request = job_service_pb2.BatchDeleteJobsRequest(parent=parent, filter=filter_)
self._inner_api_calls["batch_delete_jobs"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Searches for jobs using the provided ``SearchJobsRequest``. <END_TASK> <USER_TASK:> Description: def search_jobs(
    self,
    parent,
    request_metadata,
    search_mode=None,
    job_query=None,
    enable_broadening=None,
    require_precise_result_size=None,
    histogram_queries=None,
    job_view=None,
    offset=None,
    page_size=None,
    order_by=None,
    diversification_level=None,
    custom_ranking_info=None,
    disable_keyword_match=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Searches for jobs using the provided ``SearchJobsRequest``.

    This call constrains the ``visibility`` of jobs present in the database,
    and only returns jobs that the caller has permission to search against.

    Example:
        >>> from google.cloud import talent_v4beta1
        >>>
        >>> client = talent_v4beta1.JobServiceClient()
        >>>
        >>> parent = client.project_path('[PROJECT]')
        >>>
        >>> # TODO: Initialize `request_metadata`:
        >>> request_metadata = {}
        >>>
        >>> # Iterate over all results
        >>> for element in client.search_jobs(parent, request_metadata):
        ...     # process element
        ...     pass
        >>>
        >>> # Alternatively:
        >>>
        >>> # Iterate over results one page at a time
        >>> for page in client.search_jobs(parent, request_metadata).pages:
        ...     for element in page:
        ...         # process element
        ...         pass

    Args:
        parent (str): Required. The resource name of the project to search
            within. The format is "projects/{project_id}", for example,
            "projects/api-test-project".
        request_metadata (Union[dict, ~google.cloud.talent_v4beta1.types.RequestMetadata]):
            Required. The meta information collected about the job searcher,
            used to improve the search quality of the service. The
            identifiers (such as ``user_id``) are provided by users, and
            must be unique and consistent.

            If a dict is provided, it must be of the same form as the
            protobuf message
            :class:`~google.cloud.talent_v4beta1.types.RequestMetadata`
        search_mode (~google.cloud.talent_v4beta1.types.SearchMode):
            Optional. Mode of a search. Defaults to
            ``SearchMode.JOB_SEARCH``.
        job_query (Union[dict, ~google.cloud.talent_v4beta1.types.JobQuery]):
            Optional. Query used to search against jobs, such as keyword,
            location filters, etc.

            If a dict is provided, it must be of the same form as the
            protobuf message
            :class:`~google.cloud.talent_v4beta1.types.JobQuery`
        enable_broadening (bool): Optional. Controls whether to broaden the
            search when it produces sparse results. Broadened queries append
            results to the end of the matching results list. Defaults to
            false.
        require_precise_result_size (bool): Optional. Controls if the search
            job request requires the return of a precise count of the first
            300 results. Setting this to ``true`` ensures consistency in the
            number of results per page. Best practice is to set this value
            to true if a client allows users to jump directly to a
            non-sequential search results page. Enabling this flag may
            adversely impact performance. Defaults to false.
        histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]):
            Optional. An expression specifies a histogram request against
            matching jobs. Expression syntax is an aggregation function call
            with histogram facets and other options.

            Available aggregation function calls are:

            * ``count(string_histogram_facet)``: Count the number of
              matching entities, for each distinct attribute value.
            * ``count(numeric_histogram_facet, list of buckets)``: Count
              the number of matching entities within each bucket.

            Data types:

            - Histogram facet: facet names with format
              [a-zA-Z][a-zA-Z0-9_]+.
            - String: string like "any string with backslash escape for
              quote(")."
            - Number: whole number and floating point number like 10, -1
              and -0.01.
            - List: list of elements with comma(,) separator surrounded by
              square brackets, for example, [1, 2, 3] and
              ["one", "two", "three"].

            Built-in constants:

            - MIN (minimum number similar to java Double.MIN_VALUE)
            - MAX (maximum number similar to java Double.MAX_VALUE)

            Built-in functions:

            - bucket(start, end[, label]): bucket built-in function creates
              a bucket with range of [start, end). Note that the end is
              exclusive, for example, bucket(1, MAX, "positive number") or
              bucket(1, 10).

            Job histogram facets:

            - company_id: histogram by ``Job.distributor_company_id``.
            - company_display_name: histogram by
              ``Job.company_display_name``.
            - employment_type: histogram by ``Job.employment_types``, for
              example, "FULL_TIME", "PART_TIME".
            - company_size: histogram by ``CompanySize``, for example,
              "SMALL", "MEDIUM", "BIG".
            - publish_time_in_month: histogram by the ``Job.publish_time``
              in months. Must specify list of numeric buckets in spec.
            - publish_time_in_year: histogram by the ``Job.publish_time``
              in years. Must specify list of numeric buckets in spec.
            - degree_type: histogram by the ``Job.degree_type``, for
              example, "Bachelors", "Masters".
            - job_level: histogram by the ``Job.job_level``, for example,
              "Entry Level".
            - country: histogram by the country code of jobs, for example,
              "US", "FR".
            - admin1: histogram by the admin1 code of jobs, which is a
              global placeholder referring to the state, province, or the
              particular term a country uses to define the geographic
              structure below the country level, for example, "CA", "IL".
            - city: histogram by a combination of the "city name, admin1
              code". For example, "Mountain View, CA", "New York, NY".
            - admin1_country: histogram by a combination of the "admin1
              code, country", for example, "CA, US", "IL, US".
            - city_coordinate: histogram by the city center's GPS
              coordinates (latitude and longitude), for example,
              37.4038522,-122.0987765. Since the coordinates of a city
              center can change, customers may need to refresh them
              periodically.
            - locale: histogram by the ``Job.language_code``, for example,
              "en-US", "fr-FR".
            - language: histogram by the language subtag of the
              ``Job.language_code``, for example, "en", "fr".
            - category: histogram by the ``JobCategory``, for example,
              "COMPUTER_AND_IT", "HEALTHCARE".
            - base_compensation_unit: histogram by the
              ``CompensationUnit`` of base salary, for example, "WEEKLY",
              "MONTHLY".
            - base_compensation: histogram by the base salary. Must specify
              list of numeric buckets to group results by.
            - annualized_base_compensation: histogram by the base
              annualized salary. Must specify list of numeric buckets to
              group results by.
            - annualized_total_compensation: histogram by the total
              annualized salary. Must specify list of numeric buckets to
              group results by.
            - string_custom_attribute: histogram by string
              ``Job.custom_attributes``. Values can be accessed via square
              bracket notations like string_custom_attribute["key1"].
            - numeric_custom_attribute: histogram by numeric
              ``Job.custom_attributes``. Values can be accessed via square
              bracket notations like numeric_custom_attribute["key1"]. Must
              specify list of numeric buckets to group results by.

            Example expressions:

            * count(admin1)
            * count(base_compensation, [bucket(1000, 10000),
              bucket(10000, 100000), bucket(100000, MAX)])
            * count(string_custom_attribute["some-string-custom-attribute"])
            * count(numeric_custom_attribute["some-numeric-custom-attribute"],
              [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative")])

            If a dict is provided, it must be of the same form as the
            protobuf message
            :class:`~google.cloud.talent_v4beta1.types.HistogramQuery`
        job_view (~google.cloud.talent_v4beta1.types.JobView): Optional. The
            desired job attributes returned for jobs in the search response.
            Defaults to ``JobView.SMALL`` if no value is specified.
        offset (int): Optional. An integer that specifies the current offset
            (that is, starting result location, amongst the jobs deemed by
            the API as relevant) in search results. This field is only
            considered if ``page_token`` is unset. For example, 0 means to
            return results starting from the first matching job, and 10
            means to return from the 11th job. This can be used for
            pagination (for example, pageSize = 10 and offset = 10 means to
            return from the second page).
        page_size (int): The maximum number of resources contained in the
            underlying API response. If page streaming is performed
            per-resource, this parameter does not affect the return value.
            If page streaming is performed per-page, this determines the
            maximum number of resources in a page.
        order_by (str): Optional. The criteria determining how search
            results are sorted. Default is "relevance desc". Supported
            options are:

            - "relevance desc": By relevance descending, as determined by
              the API algorithms. Relevance thresholding of query results
              is only available with this ordering.
            - "posting_publish_time desc": By ``Job.posting_publish_time``
              descending.
            - "posting_update_time desc": By ``Job.posting_update_time``
              descending.
            - "title": By ``Job.title`` ascending.
            - "title desc": By ``Job.title`` descending.
            - "annualized_base_compensation": By job's
              ``CompensationInfo.annualized_base_compensation_range``
              ascending. Jobs whose annualized base compensation is
              unspecified are put at the end of search results.
            - "annualized_base_compensation desc": By job's
              ``CompensationInfo.annualized_base_compensation_range``
              descending. Jobs whose annualized base compensation is
              unspecified are put at the end of search results.
            - "annualized_total_compensation": By job's
              ``CompensationInfo.annualized_total_compensation_range``
              ascending. Jobs whose annualized base compensation is
              unspecified are put at the end of search results.
            - "annualized_total_compensation desc": By job's
              ``CompensationInfo.annualized_total_compensation_range``
              descending. Jobs whose annualized base compensation is
              unspecified are put at the end of search results.
            - "custom_ranking desc": By the relevance score adjusted to the
              ``SearchJobsRequest.custom_ranking_info.ranking_expression``
              with weight factor assigned by
              ``SearchJobsRequest.custom_ranking_info.importance_level`` in
              descending order.
            - "location_distance": By the distance between the location on
              jobs and locations specified in the
              ``SearchJobsRequest.job_query.location_filters``. When this
              order is selected, the
              ``SearchJobsRequest.job_query.location_filters`` must not be
              empty. When a job has multiple locations, the location
              closest to one of the locations specified in the location
              filter will be used to calculate location distance. Distance
              is calculated by the distance between two lat/long
              coordinates, with a precision of 10e-4 degrees (11.3 meters).
              Jobs that don't have locations specified will be ranked below
              jobs having locations. Diversification strategy is still
              applied unless explicitly disabled in
              ``SearchJobsRequest.diversification_level``.
        diversification_level (~google.cloud.talent_v4beta1.types.DiversificationLevel):
            Optional. Controls whether highly similar jobs are returned
            next to each other in the search results. Jobs are identified
            as highly similar based on their titles, job categories, and
            locations. Highly similar results are clustered so that only
            one representative job of the cluster is displayed to the job
            seeker higher up in the results, with the other jobs being
            displayed lower down in the results. Defaults to
            ``DiversificationLevel.SIMPLE`` if no value is specified.
        custom_ranking_info (Union[dict, ~google.cloud.talent_v4beta1.types.CustomRankingInfo]):
            Optional. Controls how job documents get ranked on top of the
            existing relevance score (determined by the API algorithm).

            If a dict is provided, it must be of the same form as the
            protobuf message
            :class:`~google.cloud.talent_v4beta1.types.CustomRankingInfo`
        disable_keyword_match (bool): Optional. Controls whether to disable
            exact keyword match on ``Job.job_title``, ``Job.description``,
            ``Job.company_display_name``, [Job.locations][0],
            ``Job.qualifications``. When disable keyword match is turned
            off, a keyword match returns jobs that do not match given
            category filters when there are matching keywords. For example,
            for the query "program manager," a result is returned even if
            the job posting has the title "software developer," which
            doesn't fall into "program manager" ontology, but does have
            "program manager" appearing in its description.

            For queries like "cloud" that don't contain title or location
            specific ontology, jobs with "cloud" keyword matches are
            returned regardless of this flag's value.

            Please use ``Company.keyword_searchable_custom_fields`` or
            ``Company.keyword_searchable_custom_attributes`` if company
            specific globally matched custom field/attribute string values
            are needed. Enabling keyword match improves recall of
            subsequent search requests.

            Defaults to false.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.gax.PageIterator` instance. By default, this is
        an iterable of
        :class:`~google.cloud.talent_v4beta1.types.MatchingJob` instances.
        This object can also be configured to iterate over the pages of the
        response through the `options` parameter.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
# Wrap the transport method to add retry and timeout logic.
if "search_jobs" not in self._inner_api_calls:
    self._inner_api_calls[
        "search_jobs"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.search_jobs,
        default_retry=self._method_configs["SearchJobs"].retry,
        default_timeout=self._method_configs["SearchJobs"].timeout,
        client_info=self._client_info,
    )

request = job_service_pb2.SearchJobsRequest(
    parent=parent,
    request_metadata=request_metadata,
    search_mode=search_mode,
    job_query=job_query,
    enable_broadening=enable_broadening,
    require_precise_result_size=require_precise_result_size,
    histogram_queries=histogram_queries,
    job_view=job_view,
    offset=offset,
    page_size=page_size,
    order_by=order_by,
    diversification_level=diversification_level,
    custom_ranking_info=custom_ranking_info,
    disable_keyword_match=disable_keyword_match,
)
iterator = google.api_core.page_iterator.GRPCIterator(
    client=None,
    method=functools.partial(
        self._inner_api_calls["search_jobs"],
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    ),
    request=request,
    items_field="matching_jobs",
    request_token_field="page_token",
    response_token_field="next_page_token",
)
return iterator
<SYSTEM_TASK:> Compute a type URL for a klass. <END_TASK> <USER_TASK:> Description: def _compute_type_url(klass, prefix=_GOOGLE_APIS_PREFIX): """Compute a type URL for a klass. :type klass: type :param klass: class to be used as a factory for the given type :type prefix: str :param prefix: URL prefix for the type :rtype: str :returns: the URL, prefixed as appropriate """
name = klass.DESCRIPTOR.full_name
return "%s/%s" % (prefix, name)
<SYSTEM_TASK:> Register a klass as the factory for a given type URL. <END_TASK> <USER_TASK:> Description: def register_type(klass, type_url=None): """Register a klass as the factory for a given type URL. :type klass: :class:`type` :param klass: class to be used as a factory for the given type :type type_url: str :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. :raises ValueError: if a registration already exists for the URL. """
if type_url is None:
    type_url = _compute_type_url(klass)
if type_url in _TYPE_URL_MAP:
    if _TYPE_URL_MAP[type_url] is not klass:
        raise ValueError("Conflict: %s" % (_TYPE_URL_MAP[type_url],))

_TYPE_URL_MAP[type_url] = klass
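A small sketch of registration, assuming the module's URL prefix is ``type.googleapis.com`` and that the helpers above are in scope; the well-known ``Struct`` type is used only as an example:

from google.protobuf import struct_pb2

register_type(struct_pb2.Struct)
assert (
    _compute_type_url(struct_pb2.Struct)
    == "type.googleapis.com/google.protobuf.Struct"
)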
<SYSTEM_TASK:> Convert an ``Any`` protobuf into the actual class. <END_TASK> <USER_TASK:> Description: def _from_any(any_pb): """Convert an ``Any`` protobuf into the actual class. Uses the type URL to do the conversion. .. note:: This assumes that the type URL is already registered. :type any_pb: :class:`google.protobuf.any_pb2.Any` :param any_pb: An any object to be converted. :rtype: object :returns: The instance (of the correct type) stored in the any instance. """
klass = _TYPE_URL_MAP[any_pb.type_url]
return klass.FromString(any_pb.value)
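A round-trip sketch, assuming the helpers above are in scope: pack a registered message into an ``Any`` and recover it:

from google.protobuf import any_pb2, struct_pb2

register_type(struct_pb2.Struct)

original = struct_pb2.Struct()
original["greeting"] = "hello"

any_pb = any_pb2.Any()
any_pb.Pack(original)  # Pack uses the type.googleapis.com prefix by default

recovered = _from_any(any_pb)
assert recovered == original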
<SYSTEM_TASK:> Polls the status of the current operation. <END_TASK> <USER_TASK:> Description: def _get_operation_rpc(self): """Polls the status of the current operation. Uses gRPC request to check. :rtype: :class:`~google.longrunning.operations_pb2.Operation` :returns: The latest status of the current operation. """
request_pb = operations_pb2.GetOperationRequest(name=self.name)
return self.client._operations_stub.GetOperation(request_pb)
<SYSTEM_TASK:> Checks the status of the current operation. <END_TASK> <USER_TASK:> Description: def _get_operation_http(self): """Checks the status of the current operation. Uses HTTP request to check. :rtype: :class:`~google.longrunning.operations_pb2.Operation` :returns: The latest status of the current operation. """
path = "operations/%s" % (self.name,) api_response = self.client._connection.api_request(method="GET", path=path) return json_format.ParseDict(api_response, operations_pb2.Operation())
<SYSTEM_TASK:> Update the state of the current object based on operation. <END_TASK> <USER_TASK:> Description: def _update_state(self, operation_pb): """Update the state of the current object based on operation. :type operation_pb: :class:`~google.longrunning.operations_pb2.Operation` :param operation_pb: Protobuf to be parsed. """
if operation_pb.done:
    self._complete = True

if operation_pb.HasField("metadata"):
    self.metadata = _from_any(operation_pb.metadata)

result_type = operation_pb.WhichOneof("result")
if result_type == "error":
    self.error = operation_pb.error
elif result_type == "response":
    self.response = _from_any(operation_pb.response)
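An illustrative polling loop built from these helpers; ``operation`` is assumed to be an instance of the surrounding class, and the sleep interval is arbitrary:

import time

# Poll until the long-running operation reports completion. The private
# _complete flag is the one set by _update_state above; swap in
# _get_operation_http() if the client is HTTP-based.
while not operation._complete:
    operation_pb = operation._get_operation_rpc()
    operation._update_state(operation_pb)
    if not operation._complete:
        time.sleep(1)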