text_prompt: string (lengths 100 to 17.7k)
code_prompt: string (lengths 7 to 9.86k)
<SYSTEM_TASK:> List subcollections of the current document. <END_TASK> <USER_TASK:> Description: def collections(self, page_size=None): """List subcollections of the current document. Args: page_size (Optional[int]): The maximum number of collections in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. Returns: Sequence[~.firestore_v1beta1.collection.CollectionReference]: iterator of subcollections of the current document. If the document does not exist at the time of `snapshot`, the iterator will be empty """
iterator = self._client._firestore_api.list_collection_ids(
    self._document_path,
    page_size=page_size,
    metadata=self._client._rpc_metadata,
)
iterator.document = self
iterator.item_to_value = _item_to_collection_ref
return iterator
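A minimal usage sketch of this method; the client, collection, and document names below are hypothetical assumptions, not part of the source above.

from google.cloud import firestore_v1beta1

client = firestore_v1beta1.Client()
doc_ref = client.collection("cities").document("SF")

for subcollection in doc_ref.collections():
    # Each item is a CollectionReference rooted at the document.
    print(subcollection.id)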
<SYSTEM_TASK:> Get a value from the snapshot data. <END_TASK> <USER_TASK:> Description: def get(self, field_path): """Get a value from the snapshot data. If the data is nested, for example: .. code-block:: python >>> snapshot.to_dict() { 'top1': { 'middle2': { 'bottom3': 20, 'bottom4': 22, }, 'middle5': True, }, 'top6': b'\x00\x01 foo', } a **field path** can be used to access the nested data. For example: .. code-block:: python >>> snapshot.get('top1') { 'middle2': { 'bottom3': 20, 'bottom4': 22, }, 'middle5': True, } >>> snapshot.get('top1.middle2') { 'bottom3': 20, 'bottom4': 22, } >>> snapshot.get('top1.middle2.bottom3') 20 See :meth:`~.firestore_v1beta1.client.Client.field_path` for more information on **field paths**. A copy is returned since the data may contain mutable values, but the data stored in the snapshot must remain immutable. Args: field_path (str): A field path (``.``-delimited list of field names). Returns: Any or None: (A copy of) the value stored for the ``field_path`` or None if snapshot document does not exist. Raises: KeyError: If the ``field_path`` does not match nested data in the snapshot. """
if not self._exists:
    return None

nested_data = field_path_module.get_nested_value(field_path, self._data)
return copy.deepcopy(nested_data)
<SYSTEM_TASK:> Return a fully-qualified database string. <END_TASK> <USER_TASK:> Description: def database_path(cls, project, instance, database): """Return a fully-qualified database string."""
return google.api_core.path_template.expand(
    "projects/{project}/instances/{instance}/databases/{database}",
    project=project,
    instance=instance,
    database=database,
)
<SYSTEM_TASK:> List instances owned by the project. <END_TASK> <USER_TASK:> Description: def list_instances(self): """List instances owned by the project. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_list_instances] :end-before: [END bigtable_list_instances] :rtype: tuple :returns: (instances, failed_locations), where 'instances' is list of :class:`google.cloud.bigtable.instance.Instance`, and 'failed_locations' is a list of locations which could not be resolved. """
resp = self.instance_admin_client.list_instances(self.project_path)
instances = [Instance.from_pb(instance, self) for instance in resp.instances]
return instances, resp.failed_locations
<SYSTEM_TASK:> List the clusters in the project. <END_TASK> <USER_TASK:> Description: def list_clusters(self): """List the clusters in the project. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_list_clusters_in_project] :end-before: [END bigtable_list_clusters_in_project] :rtype: tuple :returns: (clusters, failed_locations), where 'clusters' is list of :class:`google.cloud.bigtable.instance.Cluster`, and 'failed_locations' is a list of strings representing locations which could not be resolved. """
resp = self.instance_admin_client.list_clusters(
    self.instance_admin_client.instance_path(self.project, "-")
)
clusters = []
instances = {}
for cluster in resp.clusters:
    match_cluster_name = _CLUSTER_NAME_RE.match(cluster.name)
    instance_id = match_cluster_name.group("instance")
    if instance_id not in instances:
        instances[instance_id] = self.instance(instance_id)
    clusters.append(Cluster.from_pb(cluster, instances[instance_id]))
return clusters, resp.failed_locations
<SYSTEM_TASK:> Lists all clusters owned by a project in either the specified zone or all <END_TASK> <USER_TASK:> Description: def list_clusters( self, project_id, zone, parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists all clusters owned by a project in either the specified zone or all zones. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> response = client.list_clusters(project_id, zone) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://support.google.com/cloud/answer/6158840>`__. This field has been deprecated and replaced by the parent field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides, or "-" for all zones. This field has been deprecated and replaced by the parent field. parent (str): The parent (project and location) where the clusters will be listed. Specified in the format 'projects/*/locations/*'. Location "-" matches all zones and all regions. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.ListClustersResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "list_clusters" not in self._inner_api_calls:
    self._inner_api_calls[
        "list_clusters"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.list_clusters,
        default_retry=self._method_configs["ListClusters"].retry,
        default_timeout=self._method_configs["ListClusters"].timeout,
        client_info=self._client_info,
    )

request = cluster_service_pb2.ListClustersRequest(
    project_id=project_id, zone=zone, parent=parent
)
return self._inner_api_calls["list_clusters"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Gets the details of a specific cluster. <END_TASK> <USER_TASK:> Description: def get_cluster( self, project_id, zone, cluster_id, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets the details of a specific cluster. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> response = client.get_cluster(project_id, zone, cluster_id) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://support.google.com/cloud/answer/6158840>`__. This field has been deprecated and replaced by the name field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides. This field has been deprecated and replaced by the name field. cluster_id (str): Deprecated. The name of the cluster to retrieve. This field has been deprecated and replaced by the name field. name (str): The name (project, location, cluster) of the cluster to retrieve. Specified in the format 'projects/*/locations/*/clusters/\*'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.Cluster` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "get_cluster" not in self._inner_api_calls:
    self._inner_api_calls[
        "get_cluster"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.get_cluster,
        default_retry=self._method_configs["GetCluster"].retry,
        default_timeout=self._method_configs["GetCluster"].timeout,
        client_info=self._client_info,
    )

request = cluster_service_pb2.GetClusterRequest(
    project_id=project_id, zone=zone, cluster_id=cluster_id, name=name
)
return self._inner_api_calls["get_cluster"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Sets labels on a cluster. <END_TASK> <USER_TASK:> Description: def set_labels( self, project_id, zone, cluster_id, resource_labels, label_fingerprint, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Sets labels on a cluster. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> # TODO: Initialize `resource_labels`: >>> resource_labels = {} >>> >>> # TODO: Initialize `label_fingerprint`: >>> label_fingerprint = '' >>> >>> response = client.set_labels(project_id, zone, cluster_id, resource_labels, label_fingerprint) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://developers.google.com/console/help/new/#projectnumber>`__. This field has been deprecated and replaced by the name field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides. This field has been deprecated and replaced by the name field. cluster_id (str): Deprecated. The name of the cluster. This field has been deprecated and replaced by the name field. resource_labels (dict[str -> str]): The labels to set for that cluster. label_fingerprint (str): The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Kubernetes Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash when updating or changing labels. Make a <code>get()</code> request to the resource to get the latest fingerprint. name (str): The name (project, location, cluster id) of the cluster to set labels. Specified in the format 'projects/*/locations/*/clusters/\*'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "set_labels" not in self._inner_api_calls:
    self._inner_api_calls[
        "set_labels"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.set_labels,
        default_retry=self._method_configs["SetLabels"].retry,
        default_timeout=self._method_configs["SetLabels"].timeout,
        client_info=self._client_info,
    )

request = cluster_service_pb2.SetLabelsRequest(
    project_id=project_id,
    zone=zone,
    cluster_id=cluster_id,
    resource_labels=resource_labels,
    label_fingerprint=label_fingerprint,
    name=name,
)
return self._inner_api_calls["set_labels"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Return a fully-qualified metric string. <END_TASK> <USER_TASK:> Description: def metric_path(cls, project, metric): """Return a fully-qualified metric string."""
return google.api_core.path_template.expand(
    "projects/{project}/metrics/{metric}", project=project, metric=metric
)
<SYSTEM_TASK:> Gets a logs-based metric. <END_TASK> <USER_TASK:> Description: def get_log_metric( self, metric_name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets a logs-based metric. Example: >>> from google.cloud import logging_v2 >>> >>> client = logging_v2.MetricsServiceV2Client() >>> >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') >>> >>> response = client.get_log_metric(metric_name) Args: metric_name (str): The resource name of the desired metric: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.logging_v2.types.LogMetric` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "get_log_metric" not in self._inner_api_calls:
    self._inner_api_calls[
        "get_log_metric"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.get_log_metric,
        default_retry=self._method_configs["GetLogMetric"].retry,
        default_timeout=self._method_configs["GetLogMetric"].timeout,
        client_info=self._client_info,
    )

request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name)
if metadata is None:
    metadata = []
metadata = list(metadata)
try:
    routing_header = [("metric_name", metric_name)]
except AttributeError:
    pass
else:
    routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
        routing_header
    )
    metadata.append(routing_metadata)

return self._inner_api_calls["get_log_metric"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
<SYSTEM_TASK:> Return a fully-qualified metric_descriptor string. <END_TASK> <USER_TASK:> Description: def metric_descriptor_path(cls, project, metric_descriptor): """Return a fully-qualified metric_descriptor string."""
return google.api_core.path_template.expand(
    "projects/{project}/metricDescriptors/{metric_descriptor=**}",
    project=project,
    metric_descriptor=metric_descriptor,
)
<SYSTEM_TASK:> Return keyrange's state as a dict. <END_TASK> <USER_TASK:> Description: def _to_dict(self): """Return keyrange's state as a dict. :rtype: dict :returns: state of this instance. """
mapping = {}
if self.start_open:
    mapping["start_open"] = self.start_open
if self.start_closed:
    mapping["start_closed"] = self.start_closed
if self.end_open:
    mapping["end_open"] = self.end_open
if self.end_closed:
    mapping["end_closed"] = self.end_closed
return mapping
<SYSTEM_TASK:> Return keyset's state as a dict. <END_TASK> <USER_TASK:> Description: def _to_dict(self): """Return keyset's state as a dict. The result can be used to serialize the instance and reconstitute it later using :meth:`_from_dict`. :rtype: dict :returns: state of this instance. """
if self.all_:
    return {"all": True}

return {
    "keys": self.keys,
    "ranges": [keyrange._to_dict() for keyrange in self.ranges],
}
<SYSTEM_TASK:> Create an instance from the corresponding state mapping. <END_TASK> <USER_TASK:> Description: def _from_dict(cls, mapping): """Create an instance from the corresponding state mapping. :type mapping: dict :param mapping: the instance state. """
if mapping.get("all"):
    return cls(all_=True)

r_mappings = mapping.get("ranges", ())
ranges = [KeyRange(**r_mapping) for r_mapping in r_mappings]
return cls(keys=mapping.get("keys", ()), ranges=ranges)
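A round-trip sketch of the serialize/reconstitute pattern these two helpers form; the module path is an assumption, and both helpers are private, so this is illustrative rather than supported usage.

# Hypothetical round-trip sketch; the import path is an assumption.
from google.cloud.spanner_v1.keyset import KeyRange, KeySet

keyset = KeySet(
    keys=[["alice@example.com"]],
    ranges=[KeyRange(start_closed=["a"], end_open=["m"])],
)
state = keyset._to_dict()            # e.g. {"keys": [...], "ranges": [...]}
restored = KeySet._from_dict(state)  # reconstitutes an equivalent KeySet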
<SYSTEM_TASK:> Wrap errors for Unary-Stream and Stream-Stream gRPC callables. <END_TASK> <USER_TASK:> Description: def _wrap_stream_errors(callable_): """Wrap errors for Unary-Stream and Stream-Stream gRPC callables. The callables that return iterators require a bit more logic to re-map errors when iterating. This wraps both the initial invocation and the iterator of the return value to re-map errors. """
_patch_callable_name(callable_)

@general_helpers.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
    try:
        result = callable_(*args, **kwargs)
        return _StreamingResponseIterator(result)
    except grpc.RpcError as exc:
        six.raise_from(exceptions.from_grpc_error(exc), exc)

return error_remapped_callable
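The same remapping pattern can be sketched without gRPC: catch one exception family both at the initial call and inside the returned iterator, and translate it into another. All names below are illustrative stand-ins, not library APIs.

import functools

class BackendError(Exception):
    """Stand-in for grpc.RpcError."""

class FriendlyError(Exception):
    """Stand-in for the mapped google.api_core exception."""

class _RemappedIterator(object):
    """Re-maps errors raised while iterating the wrapped stream."""

    def __init__(self, wrapped):
        self._wrapped = wrapped

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return next(self._wrapped)
        except BackendError as exc:
            raise FriendlyError(str(exc)) from exc

def wrap_stream_errors(callable_):
    @functools.wraps(callable_)
    def error_remapped(*args, **kwargs):
        try:
            # Remap errors from the initial invocation, then hand back an
            # iterator that remaps errors raised during iteration too.
            return _RemappedIterator(callable_(*args, **kwargs))
        except BackendError as exc:
            raise FriendlyError(str(exc)) from exc
    return error_remapped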
<SYSTEM_TASK:> Create a secure channel with credentials. <END_TASK> <USER_TASK:> Description: def create_channel( target, credentials=None, scopes=None, ssl_credentials=None, **kwargs ): """Create a secure channel with credentials. Args: target (str): The target service address in the format 'hostname:port'. credentials (google.auth.credentials.Credentials): The credentials. If not specified, then this function will attempt to ascertain the credentials from the environment using :func:`google.auth.default`. scopes (Sequence[str]): An optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. ssl_credentials (grpc.ChannelCredentials): Optional SSL channel credentials. This can be used to specify different certificates. kwargs: Additional keyword args passed to :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`. Returns: grpc.Channel: The created channel. """
if credentials is None:
    credentials, _ = google.auth.default(scopes=scopes)
else:
    credentials = google.auth.credentials.with_scopes_if_required(
        credentials, scopes
    )

request = google.auth.transport.requests.Request()

# Create the metadata plugin for inserting the authorization header.
metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
    credentials, request
)

# Create a set of grpc.CallCredentials using the metadata plugin.
google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)

if ssl_credentials is None:
    ssl_credentials = grpc.ssl_channel_credentials()

# Combine the ssl credentials and the authorization credentials.
composite_credentials = grpc.composite_channel_credentials(
    ssl_credentials, google_auth_credentials
)

if HAS_GRPC_GCP:
    # If the grpc_gcp module is available use grpc_gcp.secure_channel,
    # otherwise, use grpc.secure_channel to create the grpc channel.
    return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
else:
    return grpc.secure_channel(target, composite_credentials, **kwargs)
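A minimal usage sketch; the host and scope below are assumptions, and Application Default Credentials must be discoverable in the environment for the implicit-credentials path to work.

from google.api_core import grpc_helpers

channel = grpc_helpers.create_channel(
    "pubsub.googleapis.com:443",
    scopes=["https://www.googleapis.com/auth/pubsub"],
)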
<SYSTEM_TASK:> Get the next response from the stream. <END_TASK> <USER_TASK:> Description: def next(self): """Get the next response from the stream. Returns: protobuf.Message: A single response from the stream. """
try:
    return six.next(self._wrapped)
except grpc.RpcError as exc:
    six.raise_from(exceptions.from_grpc_error(exc), exc)
<SYSTEM_TASK:> A functools.wraps helper that handles partial objects on Python 2. <END_TASK> <USER_TASK:> Description: def wraps(wrapped): """A functools.wraps helper that handles partial objects on Python 2."""
if isinstance(wrapped, functools.partial):
    return six.wraps(wrapped, assigned=_PARTIAL_VALID_ASSIGNMENTS)
else:
    return six.wraps(wrapped)
<SYSTEM_TASK:> Determine default project explicitly or implicitly as fall-back. <END_TASK> <USER_TASK:> Description: def _determine_default_project(project=None): """Determine default project explicitly or implicitly as fall-back. In the implicit case, four environments are supported. In order of precedence, the implicit environments are: * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) * GOOGLE_CLOUD_PROJECT environment variable * Google App Engine application ID * Google Compute Engine project ID (from metadata server) :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` :returns: Default project if it can be determined. """
if project is None:
    project = _get_gcd_project()

if project is None:
    project = _base_default_project(project=project)

return project
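The precedence chain reduces to a cascade of "fill in only if still None" checks. A self-contained sketch of the first two environment-variable steps (the function name is hypothetical; the real helpers also consult App Engine and the GCE metadata server, omitted here):

import os

def _default_project_sketch(project=None):
    # Illustrative only: covers just the env-var portion of the chain.
    if project is None:
        project = os.environ.get("DATASTORE_DATASET")    # gcd / emulator
    if project is None:
        project = os.environ.get("GOOGLE_CLOUD_PROJECT")
    return project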
<SYSTEM_TASK:> Getter for a wrapped API object. <END_TASK> <USER_TASK:> Description: def _datastore_api(self): """Getter for a wrapped API object."""
if self._datastore_api_internal is None:
    if self._use_grpc:
        self._datastore_api_internal = make_datastore_api(self)
    else:
        self._datastore_api_internal = HTTPDatastoreAPI(self)
return self._datastore_api_internal
<SYSTEM_TASK:> Retrieve entities, along with their attributes. <END_TASK> <USER_TASK:> Description: def get_multi( self, keys, missing=None, deferred=None, transaction=None, eventual=False ): """Retrieve entities, along with their attributes. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be retrieved from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entities returned by the backend as "missing" will be copied into it. If the list is not empty, an error will occur. :type deferred: list :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. If the list is not empty, an error will occur. :type transaction: :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, but cannot be used inside a transaction or will raise ValueError. :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. :raises: :class:`ValueError` if one or more of ``keys`` has a project which does not match our project. :raises: :class:`ValueError` if eventual is True and in a transaction. """
if not keys:
    return []

ids = set(key.project for key in keys)
for current_id in ids:
    if current_id != self.project:
        raise ValueError("Keys do not match project")

if transaction is None:
    transaction = self.current_transaction

entity_pbs = _extended_lookup(
    datastore_api=self._datastore_api,
    project=self.project,
    key_pbs=[key.to_protobuf() for key in keys],
    eventual=eventual,
    missing=missing,
    deferred=deferred,
    transaction_id=transaction and transaction.id,
)

if missing is not None:
    missing[:] = [
        helpers.entity_from_protobuf(missed_pb) for missed_pb in missing
    ]

if deferred is not None:
    deferred[:] = [
        helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred
    ]

return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs]
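A minimal usage sketch showing how the ``missing`` and ``deferred`` output lists are wired up; the client, kind, and IDs are hypothetical assumptions.

from google.cloud import datastore

client = datastore.Client()
keys = [client.key("Task", 1), client.key("Task", 2)]

missing = []   # filled with key-only entities the backend reports missing
deferred = []  # filled with keys the backend asked us to retry

entities = client.get_multi(keys, missing=missing, deferred=deferred)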
<SYSTEM_TASK:> Save entities in the Cloud Datastore. <END_TASK> <USER_TASK:> Description: def put_multi(self, entities): """Save entities in the Cloud Datastore. :type entities: list of :class:`google.cloud.datastore.entity.Entity` :param entities: The entities to be saved to the datastore. :raises: :class:`ValueError` if ``entities`` is a single entity. """
if isinstance(entities, Entity):
    raise ValueError("Pass a sequence of entities")

if not entities:
    return

current = self.current_batch
in_batch = current is not None

if not in_batch:
    current = self.batch()
    current.begin()

for entity in entities:
    current.put(entity)

if not in_batch:
    current.commit()
<SYSTEM_TASK:> Delete keys from the Cloud Datastore. <END_TASK> <USER_TASK:> Description: def delete_multi(self, keys): """Delete keys from the Cloud Datastore. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be deleted from the Datastore. """
if not keys:
    return

# We allow partial keys to attempt a delete, the backend will fail.
current = self.current_batch
in_batch = current is not None

if not in_batch:
    current = self.batch()
    current.begin()

for key in keys:
    current.delete(key)

if not in_batch:
    current.commit()
<SYSTEM_TASK:> Allocate a list of IDs from a partial key. <END_TASK> <USER_TASK:> Description: def allocate_ids(self, incomplete_key, num_ids): """Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. """
if not incomplete_key.is_partial:
    raise ValueError(("Key is not partial.", incomplete_key))

incomplete_key_pb = incomplete_key.to_protobuf()
incomplete_key_pbs = [incomplete_key_pb] * num_ids

response_pb = self._datastore_api.allocate_ids(
    incomplete_key.project, incomplete_key_pbs
)
allocated_ids = [
    allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys
]
return [
    incomplete_key.completed_key(allocated_id) for allocated_id in allocated_ids
]
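A short usage sketch; the kind name is a hypothetical assumption.

from google.cloud import datastore

client = datastore.Client()
partial_key = client.key("Task")  # no ID/name yet: a partial key
complete_keys = client.allocate_ids(partial_key, 3)
# Each returned key is `partial_key` completed with a backend-assigned ID.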
<SYSTEM_TASK:> Add a row to the batch. If the current batch meets one of the size <END_TASK> <USER_TASK:> Description: def mutate(self, row): """ Add a row to the batch. If the current batch meets one of the size limits, the batch is sent synchronously. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_batcher_mutate] :end-before: [END bigtable_batcher_mutate] :type row: class :param row: class:`~google.cloud.bigtable.row.DirectRow`. :raises: One of the following: * :exc:`~.table._BigtableRetryableError` if any row returned a transient error. * :exc:`RuntimeError` if the number of responses doesn't match the number of rows that were retried * :exc:`.batcher.MaxMutationsError` if any row exceeds max mutations count. """
mutation_count = len(row._get_mutations())
if mutation_count > MAX_MUTATIONS:
    raise MaxMutationsError(
        "The row key {} exceeds the number of mutations {}.".format(
            row.row_key, mutation_count
        )
    )

if (self.total_mutation_count + mutation_count) >= MAX_MUTATIONS:
    self.flush()

self.rows.append(row)
self.total_mutation_count += mutation_count
self.total_size += row.get_mutations_size()

if self.total_size >= self.max_row_bytes or len(self.rows) >= self.flush_count:
    self.flush()
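A usage sketch of the batching flow; ``table`` is assumed to be an existing Bigtable table object, and the column family, column, and row key are hypothetical.

from google.cloud.bigtable.batcher import MutationsBatcher

batcher = MutationsBatcher(table, flush_count=100)
row = table.row(b"row-key-1")  # a DirectRow
row.set_cell("cf1", b"col", b"value")
batcher.mutate(row)   # may flush synchronously if a size limit is hit
batcher.flush()       # send whatever is still buffered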
<SYSTEM_TASK:> Verify that a topic path is in the correct format. <END_TASK> <USER_TASK:> Description: def _parse_topic_path(topic_path): """Verify that a topic path is in the correct format. .. _resource manager docs: https://cloud.google.com/resource-manager/\ reference/rest/v1beta1/projects#\ Project.FIELDS.project_id .. _topic spec: https://cloud.google.com/storage/docs/json_api/v1/\ notifications/insert#topic Expected to be of the form: //pubsub.googleapis.com/projects/{project}/topics/{topic} where the ``project`` value must be "6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. Trailing hyphens are prohibited." (see `resource manager docs`_) and ``topic`` must have length at least two, must start with a letter and may only contain alphanumeric characters or ``-``, ``_``, ``.``, ``~``, ``+`` or ``%`` (i.e. characters used for URL encoding, see `topic spec`_). Args: topic_path (str): The topic path to be verified. Returns: Tuple[str, str]: The ``topic`` name and ``project`` parsed from the ``topic_path``. Raises: ValueError: If the topic path is invalid. """
match = _TOPIC_REF_RE.match(topic_path)
if match is None:
    raise ValueError(_BAD_TOPIC.format(topic_path))

return match.group("name"), match.group("project")
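A self-contained sketch of the kind of named-group pattern such a check uses. The regex below is an illustrative approximation of the documented constraints, not the library's actual ``_TOPIC_REF_RE``.

import re

# Illustrative approximation only; not the library's _TOPIC_REF_RE.
TOPIC_REF_RE = re.compile(
    r"^//pubsub\.googleapis\.com/"
    r"projects/(?P<project>[a-z][a-z0-9-]{4,28}[a-z0-9])/"
    r"topics/(?P<name>[A-Za-z][-._~%+A-Za-z0-9]+)$"
)

match = TOPIC_REF_RE.match(
    "//pubsub.googleapis.com/projects/my-project/topics/my-topic"
)
assert match.group("name") == "my-topic"
assert match.group("project") == "my-project"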
<SYSTEM_TASK:> Construct an instance from the JSON repr returned by the server. <END_TASK> <USER_TASK:> Description: def from_api_repr(cls, resource, bucket): """Construct an instance from the JSON repr returned by the server. See: https://cloud.google.com/storage/docs/json_api/v1/notifications :type resource: dict :param resource: JSON repr of the notification :type bucket: :class:`google.cloud.storage.bucket.Bucket` :param bucket: Bucket to which the notification is bound. :rtype: :class:`BucketNotification` :returns: the new notification instance """
topic_path = resource.get("topic")
if topic_path is None:
    raise ValueError("Resource has no topic")
name, project = _parse_topic_path(topic_path)

instance = cls(bucket, name, topic_project=project)
instance._properties = resource

return instance
<SYSTEM_TASK:> Test whether this notification exists. <END_TASK> <USER_TASK:> Description: def exists(self, client=None): """Test whether this notification exists. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/get If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :rtype: bool :returns: True, if the notification exists, else False. :raises ValueError: if the notification has no ID. """
if self.notification_id is None:
    raise ValueError("Notification not initialized by server")

client = self._require_client(client)

query_params = {}
if self.bucket.user_project is not None:
    query_params["userProject"] = self.bucket.user_project

try:
    client._connection.api_request(
        method="GET", path=self.path, query_params=query_params
    )
except NotFound:
    return False
else:
    return True
<SYSTEM_TASK:> Update this notification from the server configuration. <END_TASK> <USER_TASK:> Description: def reload(self, client=None): """Update this notification from the server configuration. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/get If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :rtype: bool :returns: True, if the notification exists, else False. :raises ValueError: if the notification has no ID. """
if self.notification_id is None:
    raise ValueError("Notification not initialized by server")

client = self._require_client(client)

query_params = {}
if self.bucket.user_project is not None:
    query_params["userProject"] = self.bucket.user_project

response = client._connection.api_request(
    method="GET", path=self.path, query_params=query_params
)
self._set_properties(response)
<SYSTEM_TASK:> Delete this notification. <END_TASK> <USER_TASK:> Description: def delete(self, client=None): """Delete this notification. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/delete If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :raises: :class:`google.api_core.exceptions.NotFound`: if the notification does not exist. :raises ValueError: if the notification has no ID. """
if self.notification_id is None:
    raise ValueError("Notification not initialized by server")

client = self._require_client(client)

query_params = {}
if self.bucket.user_project is not None:
    query_params["userProject"] = self.bucket.user_project

client._connection.api_request(
    method="DELETE", path=self.path, query_params=query_params
)
<SYSTEM_TASK:> Creates a Redis instance based on the specified tier and memory size. <END_TASK> <USER_TASK:> Description: def create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a Redis instance based on the specified tier and memory size. By default, the instance is accessible from the project's `default network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__. The creation is executed asynchronously and callers may check the returned operation to track its progress. Once the operation is completed the Redis instance will be fully functional. Completed longrunning.Operation will contain the new instance object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 >>> from google.cloud.redis_v1 import enums >>> >>> client = redis_v1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> instance_id = 'test_instance' >>> tier = enums.Instance.Tier.BASIC >>> memory_size_gb = 1 >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> >>> response = client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region instance_id (str): Required. The logical name of the Redis instance in the customer project with the following restrictions: - Must contain only lowercase letters, numbers, and hyphens. - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - Must be unique within the customer project / location instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. A Redis [Instance] resource If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "create_instance" not in self._inner_api_calls:
    self._inner_api_calls[
        "create_instance"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.create_instance,
        default_retry=self._method_configs["CreateInstance"].retry,
        default_timeout=self._method_configs["CreateInstance"].timeout,
        client_info=self._client_info,
    )

request = cloud_redis_pb2.CreateInstanceRequest(
    parent=parent, instance_id=instance_id, instance=instance
)
operation = self._inner_api_calls["create_instance"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    cloud_redis_pb2.Instance,
    metadata_type=cloud_redis_pb2.OperationMetadata,
)
<SYSTEM_TASK:> Import a Redis RDB snapshot file from GCS into a Redis instance. <END_TASK> <USER_TASK:> Description: def import_instance( self, name, input_config, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Import a Redis RDB snapshot file from GCS into a Redis instance. Redis may stop serving during this operation. Instance state will be IMPORTING for entire operation. When complete, the instance will contain only data from the imported file. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 >>> >>> client = redis_v1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> # TODO: Initialize `input_config`: >>> input_config = {} >>> >>> response = client.import_instance(name, input_config) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region input_config (Union[dict, ~google.cloud.redis_v1.types.InputConfig]): Required. Specify data to be imported. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1.types.InputConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic.
if "import_instance" not in self._inner_api_calls:
    self._inner_api_calls[
        "import_instance"
    ] = google.api_core.gapic_v1.method.wrap_method(
        self.transport.import_instance,
        default_retry=self._method_configs["ImportInstance"].retry,
        default_timeout=self._method_configs["ImportInstance"].timeout,
        client_info=self._client_info,
    )

request = cloud_redis_pb2.ImportInstanceRequest(
    name=name, input_config=input_config
)
operation = self._inner_api_calls["import_instance"](
    request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
    operation,
    self.transport._operations_client,
    cloud_redis_pb2.Instance,
    metadata_type=cloud_redis_pb2.OperationMetadata,
)
<SYSTEM_TASK:> Return a fully-qualified notification_channel string. <END_TASK> <USER_TASK:> Description: def notification_channel_path(cls, project, notification_channel): """Return a fully-qualified notification_channel string."""
return google.api_core.path_template.expand(
    "projects/{project}/notificationChannels/{notification_channel}",
    project=project,
    notification_channel=notification_channel,
)
<SYSTEM_TASK:> Return a fully-qualified notification_channel_descriptor string. <END_TASK> <USER_TASK:> Description: def notification_channel_descriptor_path(cls, project, channel_descriptor): """Return a fully-qualified notification_channel_descriptor string."""
return google.api_core.path_template.expand(
    "projects/{project}/notificationChannelDescriptors/{channel_descriptor}",
    project=project,
    channel_descriptor=channel_descriptor,
)
<SYSTEM_TASK:> Return a batch to use as a context manager. <END_TASK> <USER_TASK:> Description: def batch(self, client=None): """Return a batch to use as a context manager. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :rtype: :class:`Batch` :returns: A batch to use as a context manager. """
client = self._require_client(client)
return Batch(self, client)
<SYSTEM_TASK:> Send saved log entries as a single API call. <END_TASK> <USER_TASK:> Description: def commit(self, client=None): """Send saved log entries as a single API call. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current batch. """
if client is None:
    client = self.client

kwargs = {"logger_name": self.logger.full_name}

if self.resource is not None:
    kwargs["resource"] = self.resource._to_dict()

if self.logger.labels is not None:
    kwargs["labels"] = self.logger.labels

entries = [entry.to_api_repr() for entry in self.entries]

client.logging_api.write_entries(entries, **kwargs)
del self.entries[:]
<SYSTEM_TASK:> Coerce 'value' to a datetime date, if set or not nullable <END_TASK> <USER_TASK:> Description: def _time_from_json(value, field): """Coerce 'value' to a datetime date, if set or not nullable"""
if _not_null(value, field):
    if len(value) == 8:  # HH:MM:SS
        fmt = _TIMEONLY_WO_MICROS
    elif len(value) == 15:  # HH:MM:SS.micros
        fmt = _TIMEONLY_W_MICROS
    else:
        raise ValueError("Unknown time format: {}".format(value))
    return datetime.datetime.strptime(value, fmt).time()
<SYSTEM_TASK:> Coerce 'value' to a mapping, if set or not nullable. <END_TASK> <USER_TASK:> Description: def _record_from_json(value, field): """Coerce 'value' to a mapping, if set or not nullable."""
if _not_null(value, field):
    record = {}
    record_iter = zip(field.fields, value["f"])
    for subfield, cell in record_iter:
        converter = _CELLDATA_FROM_JSON[subfield.field_type]
        if subfield.mode == "REPEATED":
            value = [converter(item["v"], subfield) for item in cell["v"]]
        else:
            value = converter(cell["v"], subfield)
        record[subfield.name] = value
    return record
<SYSTEM_TASK:> Convert JSON row data to row with appropriate types. <END_TASK> <USER_TASK:> Description: def _row_tuple_from_json(row, schema): """Convert JSON row data to row with appropriate types. Note: ``row['f']`` and ``schema`` are presumed to be of the same length. :type row: dict :param row: A JSON response row to be converted. :type schema: tuple :param schema: A tuple of :class:`~google.cloud.bigquery.schema.SchemaField`. :rtype: tuple :returns: A tuple of data converted to native types. """
row_data = []
for field, cell in zip(schema, row["f"]):
    converter = _CELLDATA_FROM_JSON[field.field_type]
    if field.mode == "REPEATED":
        row_data.append([converter(item["v"], field) for item in cell["v"]])
    else:
        row_data.append(converter(cell["v"], field))
return tuple(row_data)
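A self-contained sketch of the converter-dispatch idea this function relies on: the field type selects a converter, and the schema is zipped against the row's ``f`` cells. The schema tuples and converter table here are simplified stand-ins, not BigQuery's own.

CONVERTERS = {"INTEGER": int, "FLOAT": float, "STRING": lambda v: v}

schema = [("x", "INTEGER"), ("y", "FLOAT"), ("name", "STRING")]
row = {"f": [{"v": "42"}, {"v": "2.5"}, {"v": "point-a"}]}

converted = tuple(
    CONVERTERS[field_type](cell["v"])
    for (name, field_type), cell in zip(schema, row["f"])
)
assert converted == (42, 2.5, "point-a")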
<SYSTEM_TASK:> Convert JSON row data to rows with appropriate types. <END_TASK> <USER_TASK:> Description: def _rows_from_json(values, schema): """Convert JSON row data to rows with appropriate types."""
from google.cloud.bigquery import Row

field_to_index = _field_to_index_mapping(schema)
return [Row(_row_tuple_from_json(r, schema), field_to_index) for r in values]
<SYSTEM_TASK:> Maps a field and value to a JSON-safe value. <END_TASK> <USER_TASK:> Description: def _scalar_field_to_json(field, row_value): """Maps a field and value to a JSON-safe value. Args: field ( \ :class:`~google.cloud.bigquery.schema.SchemaField`, \ ): The SchemaField to use for type conversion and field name. row_value (any): Value to be converted, based on the field's type. Returns: any: A JSON-serializable object. """
converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type)
if converter is None:  # STRING doesn't need converting
    return row_value
return converter(row_value)
<SYSTEM_TASK:> Convert a field into JSON-serializable values. <END_TASK> <USER_TASK:> Description: def _field_to_json(field, row_value): """Convert a field into JSON-serializable values. Args: field ( \ :class:`~google.cloud.bigquery.schema.SchemaField`, \ ): The SchemaField to use for type conversion and field name. row_value (Union[ \ Sequence[list], \ any, \ ]): Row data to be inserted. If the SchemaField's mode is REPEATED, assume this is a list. If not, the type is inferred from the SchemaField's field_type. Returns: any: A JSON-serializable object. """
if row_value is None:
    return None

if field.mode == "REPEATED":
    return _repeated_field_to_json(field, row_value)

if field.field_type == "RECORD":
    return _record_field_to_json(field.fields, row_value)

return _scalar_field_to_json(field, row_value)
<SYSTEM_TASK:> Get a nested value from a dictionary. <END_TASK> <USER_TASK:> Description: def _get_sub_prop(container, keys, default=None): """Get a nested value from a dictionary. This method works like ``dict.get(key)``, but for nested values. Arguments: container (dict): A dictionary which may contain other dictionaries as values. keys (iterable): A sequence of keys to attempt to get the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key attempts to get the value within that, and so on. default (object): (Optional) Value to returned if any of the keys are not found. Defaults to ``None``. Examples: Get a top-level value (equivalent to ``container.get('key')``). >>> _get_sub_prop({'key': 'value'}, ['key']) 'value' Get a top-level value, providing a default (equivalent to ``container.get('key', default='default')``). >>> _get_sub_prop({'nothere': 123}, ['key'], default='not found') 'not found' Get a nested value. >>> _get_sub_prop({'key': {'subkey': 'value'}}, ['key', 'subkey']) 'value' Returns: object: The value if present or the default. """
sub_val = container
for key in keys:
    if key not in sub_val:
        return default
    sub_val = sub_val[key]
return sub_val
<SYSTEM_TASK:> Set a nested value in a dictionary. <END_TASK> <USER_TASK:> Description: def _set_sub_prop(container, keys, value): """Set a nested value in a dictionary. Arguments: container (dict): A dictionary which may contain other dictionaries as values. keys (iterable): A sequence of keys to attempt to set the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key attempts to get the value within that, and so on. value (object): Value to set within the container. Examples: Set a top-level value (equivalent to ``container['key'] = 'value'``). >>> container = {} >>> _set_sub_prop(container, ['key'], 'value') >>> container {'key': 'value'} Set a nested value. >>> container = {} >>> _set_sub_prop(container, ['key', 'subkey'], 'value') >>> container {'key': {'subkey': 'value'}} Replace a nested value. >>> container = {'key': {'subkey': 'prev'}} >>> _set_sub_prop(container, ['key', 'subkey'], 'new') >>> container {'key': {'subkey': 'new'}} """
sub_val = container
for key in keys[:-1]:
    if key not in sub_val:
        sub_val[key] = {}
    sub_val = sub_val[key]
sub_val[keys[-1]] = value
<SYSTEM_TASK:> Remove a nested key from a dictionary. <END_TASK> <USER_TASK:> Description: def _del_sub_prop(container, keys): """Remove a nested key from a dictionary. Arguments: container (dict): A dictionary which may contain other dictionaries as values. keys (iterable): A sequence of keys to attempt to clear the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key attempts to get the value within that, and so on. Examples: Remove a top-level value (equivalent to ``del container['key']``). >>> container = {'key': 'value'} >>> _del_sub_prop(container, ['key']) >>> container {} Remove a nested value. >>> container = {'key': {'subkey': 'value'}} >>> _del_sub_prop(container, ['key', 'subkey']) >>> container {'key': {}} """
sub_val = container
for key in keys[:-1]:
    if key not in sub_val:
        sub_val[key] = {}
    sub_val = sub_val[key]
if keys[-1] in sub_val:
    del sub_val[keys[-1]]
<SYSTEM_TASK:> Build a resource based on a ``_properties`` dictionary, filtered by <END_TASK> <USER_TASK:> Description: def _build_resource_from_properties(obj, filter_fields): """Build a resource based on a ``_properties`` dictionary, filtered by ``filter_fields``, which follow the name of the Python object. """
partial = {}
for filter_field in filter_fields:
    api_field = obj._PROPERTY_TO_API_FIELD.get(filter_field)
    if api_field is None and filter_field not in obj._properties:
        raise ValueError("No property %s" % filter_field)
    elif api_field is not None:
        partial[api_field] = obj._properties.get(api_field)
    else:
        # allows properties that are not defined in the library
        # and properties that have the same name as API resource key
        partial[filter_field] = obj._properties[filter_field]

return partial
<SYSTEM_TASK:> Make sure a "Reference" database ID is empty. <END_TASK> <USER_TASK:> Description: def _check_database_id(database_id): """Make sure a "Reference" database ID is empty. :type database_id: unicode :param database_id: The ``database_id`` field from a "Reference" protobuf. :raises: :exc:`ValueError` if the ``database_id`` is not empty. """
if database_id != u"":
    msg = _DATABASE_ID_TEMPLATE.format(database_id)
    raise ValueError(msg)
<SYSTEM_TASK:> Add the ID or name from an element to a list. <END_TASK> <USER_TASK:> Description: def _add_id_or_name(flat_path, element_pb, empty_allowed): """Add the ID or name from an element to a list. :type flat_path: list :param flat_path: List of accumulated path parts. :type element_pb: :class:`._app_engine_key_pb2.Path.Element` :param element_pb: The element containing ID or name. :type empty_allowed: bool :param empty_allowed: Indicates if neither ID nor name need be set. If :data:`False`, then **exactly** one of them must be. :raises: :exc:`ValueError` if 0 or 2 of ID/name are set (unless ``empty_allowed=True`` and 0 are set). """
id_ = element_pb.id
name = element_pb.name
# NOTE: Below 0 and the empty string are the "null" values for their
#       respective types, indicating that the value is unset.
if id_ == 0:
    if name == u"":
        if not empty_allowed:
            raise ValueError(_EMPTY_ELEMENT)
    else:
        flat_path.append(name)
else:
    if name == u"":
        flat_path.append(id_)
    else:
        msg = _BAD_ELEMENT_TEMPLATE.format(id_, name)
        raise ValueError(msg)
<SYSTEM_TASK:> Convert a tuple of ints and strings into a legacy "Path". <END_TASK> <USER_TASK:> Description: def _to_legacy_path(dict_path): """Convert a tuple of ints and strings into a legacy "Path". .. note: This assumes, but does not verify, that each entry in ``dict_path`` is valid (i.e. doesn't have more than one key out of "name" / "id"). :type dict_path: list :param dict_path: The "structured" path for a key, i.e. it is a list of dictionaries, each of which has "kind" and one of "name" / "id" as keys. :rtype: :class:`._app_engine_key_pb2.Path` :returns: The legacy path corresponding to ``dict_path``. """
elements = []
for part in dict_path:
    element_kwargs = {"type": part["kind"]}
    if "id" in part:
        element_kwargs["id"] = part["id"]
    elif "name" in part:
        element_kwargs["name"] = part["name"]
    element = _app_engine_key_pb2.Path.Element(**element_kwargs)
    elements.append(element)

return _app_engine_key_pb2.Path(element=elements)
<SYSTEM_TASK:> Parses positional arguments into key path with kinds and IDs. <END_TASK> <USER_TASK:> Description: def _parse_path(path_args): """Parses positional arguments into key path with kinds and IDs. :type path_args: tuple :param path_args: A tuple from positional arguments. Should be alternating list of kinds (string) and ID/name parts (int or string). :rtype: :class:`list` of :class:`dict` :returns: A list of key parts with kind and ID or name set. :raises: :class:`ValueError` if there are no ``path_args``, if one of the kinds is not a string or if one of the IDs/names is not a string or an integer. """
if len(path_args) == 0:
    raise ValueError("Key path must not be empty.")

kind_list = path_args[::2]
id_or_name_list = path_args[1::2]
# Dummy sentinel value to pad incomplete key to even length path.
partial_ending = object()
if len(path_args) % 2 == 1:
    id_or_name_list += (partial_ending,)

result = []
for kind, id_or_name in zip(kind_list, id_or_name_list):
    curr_key_part = {}
    if isinstance(kind, six.string_types):
        curr_key_part["kind"] = kind
    else:
        raise ValueError(kind, "Kind was not a string.")

    if isinstance(id_or_name, six.string_types):
        curr_key_part["name"] = id_or_name
    elif isinstance(id_or_name, six.integer_types):
        curr_key_part["id"] = id_or_name
    elif id_or_name is not partial_ending:
        raise ValueError(id_or_name, "ID/name was not a string or integer.")

    result.append(curr_key_part)

return result
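A self-contained sketch of the alternating-slice idiom at the heart of this parser: even indices are kinds, odd indices are IDs or names, and an odd-length tuple means the final kind has no ID yet (a partial key).

path_args = ("Parent", "alice", "Child", 42, "Grandchild")

kinds = path_args[::2]          # even positions
ids_or_names = path_args[1::2]  # odd positions; one short => partial key

assert kinds == ("Parent", "Child", "Grandchild")
assert ids_or_names == ("alice", 42)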
<SYSTEM_TASK:> Sets protected data by combining raw data set from the constructor. <END_TASK> <USER_TASK:> Description: def _combine_args(self): """Sets protected data by combining raw data set from the constructor. If a ``_parent`` is set, updates the ``_flat_path`` and sets the ``_namespace`` and ``_project`` if not already set. :rtype: :class:`list` of :class:`dict` :returns: A list of key parts with kind and ID or name set. :raises: :class:`ValueError` if the parent key is not complete. """
child_path = self._parse_path(self._flat_path)

if self._parent is not None:
    if self._parent.is_partial:
        raise ValueError("Parent key must be complete.")

    # We know that _parent.path() will return a copy.
    child_path = self._parent.path + child_path
    self._flat_path = self._parent.flat_path + self._flat_path
    if (
        self._namespace is not None
        and self._namespace != self._parent.namespace
    ):
        raise ValueError("Child namespace must agree with parent's.")
    self._namespace = self._parent.namespace
    if self._project is not None and self._project != self._parent.project:
        raise ValueError("Child project must agree with parent's.")
    self._project = self._parent.project

return child_path
<SYSTEM_TASK:> Duplicates the Key. <END_TASK> <USER_TASK:> Description: def _clone(self): """Duplicates the Key. Most attributes are simple types, so don't require copying. Other attributes like ``parent`` are long-lived and so we re-use them. :rtype: :class:`google.cloud.datastore.key.Key` :returns: A new ``Key`` instance with the same data as the current one. """
cloned_self = self.__class__(
    *self.flat_path, project=self.project, namespace=self.namespace
)
# If the current parent has already been set, we re-use
# the same instance
cloned_self._parent = self._parent
return cloned_self
<SYSTEM_TASK:> Return a protobuf corresponding to the key. <END_TASK> <USER_TASK:> Description: def to_protobuf(self): """Return a protobuf corresponding to the key. :rtype: :class:`.entity_pb2.Key` :returns: The protobuf representing the key. """
key = _entity_pb2.Key()
key.partition_id.project_id = self.project

if self.namespace:
    key.partition_id.namespace_id = self.namespace

for item in self.path:
    element = key.path.add()
    if "kind" in item:
        element.kind = item["kind"]
    if "id" in item:
        element.id = item["id"]
    if "name" in item:
        element.name = item["name"]

return key
<SYSTEM_TASK:> Convert to a base64-encoded urlsafe string for App Engine. <END_TASK> <USER_TASK:> Description: def to_legacy_urlsafe(self, location_prefix=None): """Convert to a base64-encoded urlsafe string for App Engine. This is intended to work with the "legacy" representation of a datastore "Key" used within Google App Engine (a so-called "Reference"). The returned string can be used as the ``urlsafe`` argument to ``ndb.Key(urlsafe=...)``. The base64 encoded values will have padding removed. .. note:: The string returned by ``to_legacy_urlsafe`` is equivalent, but not identical, to the string returned by ``ndb``. The location prefix may need to be specified to obtain identical urlsafe keys. :type location_prefix: str :param location_prefix: The location prefix of an App Engine project ID. Often this value is 's~', but may also be 'e~', or other location prefixes currently unknown. :rtype: bytes :returns: A bytestring containing the key encoded as URL-safe base64. """
if location_prefix is None:
    project_id = self.project
else:
    project_id = location_prefix + self.project

reference = _app_engine_key_pb2.Reference(
    app=project_id,
    path=_to_legacy_path(self._path),  # Avoid the copy.
    name_space=self.namespace,
)
raw_bytes = reference.SerializeToString()
return base64.urlsafe_b64encode(raw_bytes).strip(b"=")
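A self-contained sketch of the padding idiom used on the last line: padding stripped at encode time can be restored mechanically at decode time, which is what ndb-compatible consumers of these strings rely on. The raw bytes here are arbitrary sample data.

import base64

raw = b"\x6a\x0c\x73~my-app-id"  # arbitrary sample bytes
encoded = base64.urlsafe_b64encode(raw).strip(b"=")
padded = encoded + b"=" * (-len(encoded) % 4)  # restore padding
assert base64.urlsafe_b64decode(padded) == raw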
<SYSTEM_TASK:> Creates a parent key for the current path. <END_TASK> <USER_TASK:> Description: def _make_parent(self): """Creates a parent key for the current path. Extracts all but the last element in the key path and creates a new key, while still matching the namespace and the project. :rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType` :returns: A new ``Key`` instance, whose path consists of all but the last element of current path. If the current key has only one path element, returns ``None``. """
if self.is_partial: parent_args = self.flat_path[:-1] else: parent_args = self.flat_path[:-2] if parent_args: return self.__class__( *parent_args, project=self.project, namespace=self.namespace )
<SYSTEM_TASK:> The parent of the current key. <END_TASK> <USER_TASK:> Description: def parent(self): """The parent of the current key. :rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType` :returns: A new ``Key`` instance, whose path consists of all but the last element of current path. If the current key has only one path element, returns ``None``. """
if self._parent is None: self._parent = self._make_parent() return self._parent
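Walking the parent chain, as a small sketch; because the key below is complete, each ``parent`` strips one kind/name pair until nothing is left.

    from google.cloud.datastore import Key

    key = Key("Grandparent", "a", "Parent", "b", "Child", "c", project="demo-project")
    assert key.parent.flat_path == ("Grandparent", "a", "Parent", "b")
    assert key.parent.parent.flat_path == ("Grandparent", "a")
    # A single-element key has no parent.
    assert key.parent.parent.parent is None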
<SYSTEM_TASK:> Callback that attempts to send pending logs before termination. <END_TASK> <USER_TASK:> Description: def _main_thread_terminated(self): """Callback that attempts to send pending logs before termination."""
if not self.is_alive: return if not self._queue.empty(): print( "Program shutting down, attempting to send %d queued log " "entries to Stackdriver Logging..." % (self._queue.qsize(),), file=sys.stderr, ) if self.stop(self._grace_period): print("Sent all pending logs.", file=sys.stderr) else: print( "Failed to send %d pending logs." % (self._queue.qsize(),), file=sys.stderr, )
<SYSTEM_TASK:> Queues a log entry to be written by the background thread. <END_TASK> <USER_TASK:> Description: def enqueue( self, record, message, resource=None, labels=None, trace=None, span_id=None ): """Queues a log entry to be written by the background thread. :type record: :class:`logging.LogRecord` :param record: Python log record that the handler was called with. :type message: str :param message: The message from the ``LogRecord`` after being formatted by the associated log formatters. :type resource: :class:`~google.cloud.logging.resource.Resource` :param resource: (Optional) Monitored resource of the entry :type labels: dict :param labels: (Optional) Mapping of labels for the entry. :type trace: str :param trace: (optional) traceid to apply to the logging entry. :type span_id: str :param span_id: (optional) span_id within the trace for the log entry. Specify the trace parameter if span_id is set. """
self._queue.put_nowait( { "info": {"message": message, "python_logger": record.name}, "severity": record.levelname, "resource": resource, "labels": labels, "trace": trace, "span_id": span_id, } )
<SYSTEM_TASK:> Check whether ``other`` is an ancestor. <END_TASK> <USER_TASK:> Description: def eq_or_parent(self, other): """Check whether ``other`` is an ancestor. Returns: (bool) True IFF ``other`` is an ancestor or equal to ``self``, else False. """
return self.parts[: len(other.parts)] == other.parts[: len(self.parts)]
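The slicing trick reads: the comparison is True exactly when one ``parts`` tuple is a prefix of the other. A standalone illustration on bare tuples:

    def eq_or_parent(self_parts, other_parts):
        # Mirrors the method above on bare tuples.
        return self_parts[: len(other_parts)] == other_parts[: len(self_parts)]

    assert eq_or_parent(("a", "b", "c"), ("a", "b"))   # other is an ancestor
    assert eq_or_parent(("a", "b"), ("a", "b"))        # equal
    assert not eq_or_parent(("a", "b"), ("a", "x"))    # siblings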
<SYSTEM_TASK:> Gets the latest state of a long-running operation. <END_TASK> <USER_TASK:> Description: def get_operation( self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT ): """Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service. Example: >>> from google.api_core import operations_v1 >>> api = operations_v1.OperationsClient() >>> name = '' >>> response = api.get_operation(name) Args: name (str): The name of the operation resource. retry (google.api_core.retry.Retry): The retry strategy to use when invoking the RPC. If unspecified, the default retry from the client configuration will be used. If ``None``, then this method will not retry the RPC at all. timeout (float): The amount of time in seconds to wait for the RPC to complete. Note that if ``retry`` is used, this timeout applies to each individual attempt and the overall time it takes for this method to complete may be longer. If unspecified, the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. Returns: google.longrunning.operations_pb2.Operation: The state of the operation. Raises: google.api_core.exceptions.GoogleAPICallError: If an error occurred while invoking the RPC, the appropriate ``GoogleAPICallError`` subclass will be raised. """
request = operations_pb2.GetOperationRequest(name=name) return self._get_operation(request, retry=retry, timeout=timeout)
<SYSTEM_TASK:> Lists operations that match the specified filter in the request. <END_TASK> <USER_TASK:> Description: def list_operations( self, name, filter_, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, ): """ Lists operations that match the specified filter in the request. Example: >>> from google.api_core import operations_v1 >>> api = operations_v1.OperationsClient() >>> name = '' >>> >>> # Iterate over all results >>> for operation in api.list_operations(name): >>> # process operation >>> pass >>> >>> # Or iterate over results one page at a time >>> iter = api.list_operations(name) >>> for page in iter.pages: >>> for operation in page: >>> # process operation >>> pass Args: name (str): The name of the operation collection. filter_ (str): The standard list filter. retry (google.api_core.retry.Retry): The retry strategy to use when invoking the RPC. If unspecified, the default retry from the client configuration will be used. If ``None``, then this method will not retry the RPC at all. timeout (float): The amount of time in seconds to wait for the RPC to complete. Note that if ``retry`` is used, this timeout applies to each individual attempt and the overall time it takes for this method to complete may be longer. If unspecified, the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. Returns: google.api_core.page_iterator.Iterator: An iterator that yields :class:`google.longrunning.operations_pb2.Operation` instances. Raises: google.api_core.exceptions.MethodNotImplemented: If the server does not support this method. Services are not required to implement this method. google.api_core.exceptions.GoogleAPICallError: If an error occurred while invoking the RPC, the appropriate ``GoogleAPICallError`` subclass will be raised. """
# Create the request object. request = operations_pb2.ListOperationsRequest(name=name, filter=filter_) # Create the method used to fetch pages method = functools.partial(self._list_operations, retry=retry, timeout=timeout) iterator = page_iterator.GRPCIterator( client=None, method=method, request=request, items_field="operations", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
<SYSTEM_TASK:> Starts asynchronous cancellation on a long-running operation. <END_TASK> <USER_TASK:> Description: def cancel_operation( self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT ): """Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. Clients can use :meth:`get_operation` or service- specific methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an ``Operation.error`` value with a ``google.rpc.Status.code`` of ``1``, corresponding to ``Code.CANCELLED``. Example: >>> from google.api_core import operations_v1 >>> api = operations_v1.OperationsClient() >>> name = '' >>> api.cancel_operation(name) Args: name (str): The name of the operation resource to be cancelled. retry (google.api_core.retry.Retry): The retry strategy to use when invoking the RPC. If unspecified, the default retry from the client configuration will be used. If ``None``, then this method will not retry the RPC at all. timeout (float): The amount of time in seconds to wait for the RPC to complete. Note that if ``retry`` is used, this timeout applies to each individual attempt and the overall time it takes for this method to complete may be longer. If unspecified, the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. Raises: google.api_core.exceptions.MethodNotImplemented: If the server does not support this method. Services are not required to implement this method. google.api_core.exceptions.GoogleAPICallError: If an error occurred while invoking the RPC, the appropriate ``GoogleAPICallError`` subclass will be raised. """
# Create the request object. request = operations_pb2.CancelOperationRequest(name=name) self._cancel_operation(request, retry=retry, timeout=timeout)
<SYSTEM_TASK:> Deletes a long-running operation. <END_TASK> <USER_TASK:> Description: def delete_operation( self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT ): """Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. Example: >>> from google.api_core import operations_v1 >>> api = operations_v1.OperationsClient() >>> name = '' >>> api.delete_operation(name) Args: name (str): The name of the operation resource to be deleted. retry (google.api_core.retry.Retry): The retry strategy to use when invoking the RPC. If unspecified, the default retry from the client configuration will be used. If ``None``, then this method will not retry the RPC at all. timeout (float): The amount of time in seconds to wait for the RPC to complete. Note that if ``retry`` is used, this timeout applies to each individual attempt and the overall time it takes for this method to complete may be longer. If unspecified, the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. Raises: google.api_core.exceptions.MethodNotImplemented: If the server does not support this method. Services are not required to implement this method. google.api_core.exceptions.GoogleAPICallError: If an error occurred while invoking the RPC, the appropriate ``GoogleAPICallError`` subclass will be raised. """
# Create the request object. request = operations_pb2.DeleteOperationRequest(name=name) self._delete_operation(request, retry=retry, timeout=timeout)
<SYSTEM_TASK:> Extract the config name from a full resource name. <END_TASK> <USER_TASK:> Description: def config_name_from_full_name(full_name): """Extract the config name from a full resource name. >>> config_name_from_full_name('projects/my-proj/configs/my-config') "my-config" :type full_name: str :param full_name: The full resource name of a config. The full resource name looks like ``projects/project-name/configs/config-name`` and is returned as the ``name`` field of a config resource. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs :rtype: str :returns: The config's short name, given its full resource name. :raises: :class:`ValueError` if ``full_name`` is not the expected format """
projects, _, configs, result = full_name.split("/") if projects != "projects" or configs != "configs": raise ValueError( "Unexpected format of resource", full_name, 'Expected "projects/{proj}/configs/{cfg}"', ) return result
<SYSTEM_TASK:> Extract the variable name from a full resource name. <END_TASK> <USER_TASK:> Description: def variable_name_from_full_name(full_name): """Extract the variable name from a full resource name. >>> variable_name_from_full_name( 'projects/my-proj/configs/my-config/variables/var-name') "var-name" >>> variable_name_from_full_name( 'projects/my-proj/configs/my-config/variables/another/var/name') "another/var/name" :type full_name: str :param full_name: The full resource name of a variable. The full resource name looks like ``projects/prj-name/configs/cfg-name/variables/var-name`` and is returned as the ``name`` field of a variable resource. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables :rtype: str :returns: The variable's short name, given its full resource name. :raises: :class:`ValueError` if ``full_name`` is not the expected format """
projects, _, configs, _, variables, result = full_name.split("/", 5) if projects != "projects" or configs != "configs" or variables != "variables": raise ValueError( "Unexpected format of resource", full_name, 'Expected "projects/{proj}/configs/{cfg}/variables/..."', ) return result
<SYSTEM_TASK:> Return the maximum value in this histogram. <END_TASK> <USER_TASK:> Description: def max(self): """Return the maximum value in this histogram. If there are no values in the histogram at all, return 600. Returns: int: The maximum value in the histogram. """
if not self._data:
    return 600
# The histogram keys are the observed values; the max is the largest key.
return max(self._data)
<SYSTEM_TASK:> Return the minimum value in this histogram. <END_TASK> <USER_TASK:> Description: def min(self): """Return the minimum value in this histogram. If there are no values in the histogram at all, return 10. Returns: int: The minimum value in the histogram. """
if not self._data:
    return 10
# The histogram keys are the observed values; the min is the smallest key.
return min(self._data)
<SYSTEM_TASK:> Add the value to this histogram. <END_TASK> <USER_TASK:> Description: def add(self, value): """Add the value to this histogram. Args: value (int): The value. Values outside of ``10 <= x <= 600`` will be raised to ``10`` or reduced to ``600``. """
# If the value is out of bounds, bring it in bounds. value = int(value) if value < 10: value = 10 if value > 600: value = 600 # Add the value to the histogram's data dictionary. self._data.setdefault(value, 0) self._data[value] += 1 self._len += 1
<SYSTEM_TASK:> Return the value that is the Nth percentile in the histogram. <END_TASK> <USER_TASK:> Description: def percentile(self, percent): """Return the value that is the Nth percentile in the histogram. Args: percent (Union[int, float]): The percentile being sought. The default consumer implementations consistently use ``99``. Returns: int: The value corresponding to the requested percentile. """
# Sanity check: Any value over 100 should become 100. if percent >= 100: percent = 100 # Determine the actual target number. target = len(self) - len(self) * (percent / 100) # Iterate over the values in reverse, dropping the target by the # number of times each value has been seen. When the target passes # 0, return the value we are currently viewing. for k in reversed(sorted(self._data.keys())): target -= self._data[k] if target < 0: return k # The only way to get here is if there was no data. # In this case, just return 10 seconds. return 10
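A self-contained sketch tracing the percentile walk by hand. It stitches the methods above into a minimal ``Histogram`` (float division is made explicit with ``100.0``, which the original relies on as well); the sample values are illustrative.

    class Histogram(object):
        def __init__(self):
            self._data = {}
            self._len = 0

        def __len__(self):
            return self._len

        def add(self, value):
            # Clamp to the documented bounds [10, 600], then count it.
            value = min(max(int(value), 10), 600)
            self._data[value] = self._data.get(value, 0) + 1
            self._len += 1

        def percentile(self, percent):
            target = len(self) - len(self) * (min(percent, 100) / 100.0)
            for k in sorted(self._data, reverse=True):
                target -= self._data[k]
                if target < 0:
                    return k
            return 10  # no data

    hist = Histogram()
    for value in (10, 10, 20, 30, 600):
        hist.add(value)

    # target = 5 - 5 * 0.80 = 1.0; 600 drops it to 0.0, 30 to -1.0 -> return 30.
    assert hist.percentile(80) == 30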
<SYSTEM_TASK:> Creates a job. <END_TASK> <USER_TASK:> Description: def create_job( self, parent, job, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a job. Example: >>> from google.cloud import scheduler_v1beta1 >>> >>> client = scheduler_v1beta1.CloudSchedulerClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> >>> # TODO: Initialize `job`: >>> job = {} >>> >>> response = client.create_job(parent, job) Args: parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. The job to add. The user can optionally specify a name for the job in ``name``. ``name`` cannot be the same as an existing job. If a name is not specified then the system will generate a random unique name that will be returned (``name``) in the response. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.scheduler_v1beta1.types.Job` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.scheduler_v1beta1.types.Job` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "create_job" not in self._inner_api_calls: self._inner_api_calls[ "create_job" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_job, default_retry=self._method_configs["CreateJob"].retry, default_timeout=self._method_configs["CreateJob"].timeout, client_info=self._client_info, ) request = cloudscheduler_pb2.CreateJobRequest(parent=parent, job=job) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_job"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Construct a model reference from model ID string. <END_TASK> <USER_TASK:> Description: def from_string(cls, model_id, default_project=None): """Construct a model reference from model ID string. Args: model_id (str): A model ID in standard SQL format. If ``default_project`` is not specified, this must include a project ID, dataset ID, and model ID, each separated by ``.``. default_project (str): Optional. The project ID to use when ``model_id`` does not include a project ID. Returns: google.cloud.bigquery.model.ModelReference: Model reference parsed from ``model_id``. Raises: ValueError: If ``model_id`` is not a fully-qualified model ID in standard SQL format. """
proj, dset, model = _helpers._parse_3_part_id( model_id, default_project=default_project, property_name="model_id" ) return cls.from_api_repr( {"projectId": proj, "datasetId": dset, "modelId": model} )
<SYSTEM_TASK:> Leases tasks from a pull queue for ``lease_duration``. <END_TASK> <USER_TASK:> Description: def lease_tasks( self, parent, lease_duration, max_tasks=None, response_view=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Leases tasks from a pull queue for ``lease_duration``. This method is invoked by the worker to obtain a lease. The worker must acknowledge the task via ``AcknowledgeTask`` after they have performed the work associated with the task. The ``payload`` is intended to store data that the worker needs to perform the work associated with the task. To return the payloads in the ``response``, set ``response_view`` to ``FULL``. A maximum of 10 qps of ``LeaseTasks`` requests are allowed per queue. ``RESOURCE_EXHAUSTED`` is returned when this limit is exceeded. ``RESOURCE_EXHAUSTED`` is also returned when ``max_tasks_dispatched_per_second`` is exceeded. Example: >>> from google.cloud import tasks_v2beta2 >>> >>> client = tasks_v2beta2.CloudTasksClient() >>> >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') >>> >>> # TODO: Initialize `lease_duration`: >>> lease_duration = {} >>> >>> response = client.lease_tasks(parent, lease_duration) Args: parent (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]): After the worker has successfully finished the work associated with the task, the worker must call via ``AcknowledgeTask`` before the ``schedule_time``. Otherwise the task will be returned to a later ``LeaseTasks`` call so that another worker can retry it. The maximum lease duration is 1 week. ``lease_duration`` will be truncated to the nearest second. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Duration` max_tasks (int): The maximum number of tasks to lease. The system will make a best effort to return as close to as ``max_tasks`` as possible. The largest that ``max_tasks`` can be is 1000. response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved by default because some data, such as payloads, might be desirable to return only when needed because of its large size or because of the sensitivity of data that it contains. Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView`` `Google IAM <https://cloud.google.com/iam/>`___ permission on the ``Task`` resource. filter_ (str): ``filter`` can be used to specify a subset of tasks to lease. When ``filter`` is set to ``tag=<my-tag>`` then the ``response`` will contain only tasks whose ``tag`` is equal to ``<my-tag>``. ``<my-tag>`` must be less than 500 characters. When ``filter`` is set to ``tag_function=oldest_tag()``, only tasks which have the same tag as the task with the oldest ``schedule_time`` will be returned. Grammar Syntax: - ``filter = "tag=" tag | "tag_function=" function`` - ``tag = string`` - ``function = "oldest_tag()"`` The ``oldest_tag()`` function returns tasks which have the same tag as the oldest task (ordered by schedule time). 
SDK compatibility: Although the SDK allows tags to be either string or `bytes <https://cloud.google.com/appengine/docs/standard/java/javadoc/com/google/appengine/api/taskqueue/TaskOptions.html#tag-byte:A->`__, only UTF-8 encoded tags can be used in Cloud Tasks. Tags which aren't UTF-8 encoded can't be used in the ``filter`` and the task's ``tag`` will be displayed as empty in Cloud Tasks. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "lease_tasks" not in self._inner_api_calls: self._inner_api_calls[ "lease_tasks" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.lease_tasks, default_retry=self._method_configs["LeaseTasks"].retry, default_timeout=self._method_configs["LeaseTasks"].timeout, client_info=self._client_info, ) request = cloudtasks_pb2.LeaseTasksRequest( parent=parent, lease_duration=lease_duration, max_tasks=max_tasks, response_view=response_view, filter=filter_, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["lease_tasks"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Helper to format a LogRecord in Stackdriver fluentd format. <END_TASK> <USER_TASK:> Description: def format_stackdriver_json(record, message): """Helper to format a LogRecord in Stackdriver fluentd format. :rtype: str :returns: JSON str to be written to the log file. """
subsecond, second = math.modf(record.created) payload = { "message": message, "timestamp": {"seconds": int(second), "nanos": int(subsecond * 1e9)}, "thread": record.thread, "severity": record.levelname, } return json.dumps(payload)
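A hedged usage sketch with a synthetic ``LogRecord``, assuming the module-level ``math`` and ``json`` imports the helper relies on; the logger name and message are illustrative.

    import logging

    record = logging.LogRecord(
        "my_logger", logging.WARNING, __file__, 0, "disk almost full", None, None
    )
    print(format_stackdriver_json(record, record.getMessage()))
    # -> {"message": "disk almost full", "timestamp": {"seconds": ..., "nanos": ...},
    #     "thread": ..., "severity": "WARNING"}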
<SYSTEM_TASK:> Helper to get trace_id from web application request header. <END_TASK> <USER_TASK:> Description: def get_trace_id(): """Helper to get trace_id from web application request header. :rtype: str :returns: TraceID in HTTP request headers. """
checkers = ( get_trace_id_from_django, get_trace_id_from_flask, get_trace_id_from_webapp2, ) for checker in checkers: trace_id = checker() if trace_id is not None: return trace_id return None
<SYSTEM_TASK:> Return a fully-qualified group string. <END_TASK> <USER_TASK:> Description: def group_path(cls, project, group): """Return a fully-qualified group string."""
return google.api_core.path_template.expand( "projects/{project}/groups/{group}", project=project, group=group )
<SYSTEM_TASK:> Lists the existing groups. <END_TASK> <USER_TASK:> Description: def list_groups( self, name, children_of_group=None, ancestors_of_group=None, descendants_of_group=None, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists the existing groups. Example: >>> from google.cloud import monitoring_v3 >>> >>> client = monitoring_v3.GroupServiceClient() >>> >>> name = client.project_path('[PROJECT]') >>> >>> # Iterate over all results >>> for element in client.list_groups(name): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_groups(name).pages: ... for element in page: ... # process element ... pass Args: name (str): The project whose groups are to be listed. The format is ``"projects/{project_id_or_number}"``. children_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``. Returns groups whose ``parentName`` field contains the group name. If no groups have this parent, the results are empty. ancestors_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``. Returns groups that are ancestors of the specified group. The groups are returned in order, starting with the immediate parent and ending with the most distant ancestor. If the specified group has no immediate parent, the results are empty. descendants_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``. Returns the descendants of the specified group. This is a superset of the results returned by the ``childrenOfGroup`` filter, and includes children-of-children, and so forth. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.monitoring_v3.types.Group` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "list_groups" not in self._inner_api_calls: self._inner_api_calls[ "list_groups" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_groups, default_retry=self._method_configs["ListGroups"].retry, default_timeout=self._method_configs["ListGroups"].timeout, client_info=self._client_info, ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( children_of_group=children_of_group, ancestors_of_group=ancestors_of_group, descendants_of_group=descendants_of_group, ) request = group_service_pb2.ListGroupsRequest( name=name, children_of_group=children_of_group, ancestors_of_group=ancestors_of_group, descendants_of_group=descendants_of_group, page_size=page_size, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["list_groups"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="group", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
<SYSTEM_TASK:> Returns a list of ``Voice`` supported for synthesis. <END_TASK> <USER_TASK:> Description: def list_voices( self, language_code=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Returns a list of ``Voice`` supported for synthesis. Example: >>> from google.cloud import texttospeech_v1beta1 >>> >>> client = texttospeech_v1beta1.TextToSpeechClient() >>> >>> response = client.list_voices() Args: language_code (str): Optional (but recommended) `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__ language tag. If specified, the ListVoices call will only return voices that can be used to synthesize this language\_code. E.g. when specifying "en-NZ", you will get supported "en-*" voices; when specifying "no", you will get supported "no-*" (Norwegian) and "nb-*" (Norwegian Bokmal) voices; specifying "zh" will also get supported "cmn-*" voices; specifying "zh-hk" will also get supported "yue-\*" voices. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.texttospeech_v1beta1.types.ListVoicesResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "list_voices" not in self._inner_api_calls: self._inner_api_calls[ "list_voices" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_voices, default_retry=self._method_configs["ListVoices"].retry, default_timeout=self._method_configs["ListVoices"].timeout, client_info=self._client_info, ) request = cloud_tts_pb2.ListVoicesRequest(language_code=language_code) return self._inner_api_calls["list_voices"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Raise AttributeError if the credentials are unsigned. <END_TASK> <USER_TASK:> Description: def ensure_signed_credentials(credentials): """Raise AttributeError if the credentials are unsigned. :type credentials: :class:`google.auth.credentials.Signing` :param credentials: The credentials used to create a private key for signing text. :raises: :exc:`AttributeError` if credentials is not an instance of :class:`google.auth.credentials.Signing`. """
if not isinstance(credentials, google.auth.credentials.Signing): auth_uri = ( "https://google-cloud-python.readthedocs.io/en/latest/" "core/auth.html?highlight=authentication#setting-up-" "a-service-account" ) raise AttributeError( "You need a private key to sign credentials. " "The credentials you are currently using (%s) " "just contain a token. See %s for more " "details." % (type(credentials), auth_uri) )
<SYSTEM_TASK:> Gets query parameters for creating a signed URL. <END_TASK> <USER_TASK:> Description: def get_signed_query_params_v2(credentials, expiration, string_to_sign): """Gets query parameters for creating a signed URL. :type credentials: :class:`google.auth.credentials.Signing` :param credentials: The credentials used to create a private key for signing text. :type expiration: int or long :param expiration: When the signed URL should expire. :type string_to_sign: str :param string_to_sign: The string to be signed by the credentials. :raises: :exc:`AttributeError` if credentials is not an instance of :class:`google.auth.credentials.Signing`. :rtype: dict :returns: Query parameters matching the signing credentials with a signed payload. """
ensure_signed_credentials(credentials) signature_bytes = credentials.sign_bytes(string_to_sign) signature = base64.b64encode(signature_bytes) service_account_name = credentials.signer_email return { "GoogleAccessId": service_account_name, "Expires": str(expiration), "Signature": signature, }
<SYSTEM_TASK:> Convert 'expiration' to a number of seconds in the future. <END_TASK> <USER_TASK:> Description: def get_expiration_seconds_v2(expiration): """Convert 'expiration' to a number of seconds in the future. :type expiration: Union[Integer, datetime.datetime, datetime.timedelta] :param expiration: Point in time when the signed URL should expire. :raises: :exc:`TypeError` when expiration is not a valid type. :rtype: int :returns: a timestamp as an absolute number of seconds since epoch. """
# If it's a timedelta, add it to `now` in UTC. if isinstance(expiration, datetime.timedelta): now = NOW().replace(tzinfo=_helpers.UTC) expiration = now + expiration # If it's a datetime, convert to a timestamp. if isinstance(expiration, datetime.datetime): micros = _helpers._microseconds_from_datetime(expiration) expiration = micros // 10 ** 6 if not isinstance(expiration, six.integer_types): raise TypeError( "Expected an integer timestamp, datetime, or " "timedelta. Got %s" % type(expiration) ) return expiration
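Hedged examples of the three accepted types. ``_helpers.UTC`` is the module's own UTC ``tzinfo``, and ``1893456000`` is the epoch timestamp of 2030-01-01T00:00:00Z.

    import datetime

    # timedelta: converted to an absolute timestamp one hour from now.
    stamp = get_expiration_seconds_v2(datetime.timedelta(hours=1))

    # datetime: converted to its epoch timestamp.
    when = datetime.datetime(2030, 1, 1, tzinfo=_helpers.UTC)
    assert get_expiration_seconds_v2(when) == 1893456000

    # int: passed through unchanged.
    assert get_expiration_seconds_v2(1893456000) == 1893456000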
<SYSTEM_TASK:> Convert 'expiration' to a number of seconds offset from the current time. <END_TASK> <USER_TASK:> Description: def get_expiration_seconds_v4(expiration): """Convert 'expiration' to a number of seconds offset from the current time. :type expiration: Union[Integer, datetime.datetime, datetime.timedelta] :param expiration: Point in time when the signed URL should expire. :raises: :exc:`TypeError` when expiration is not a valid type. :raises: :exc:`ValueError` when expiration is too large. :rtype: Integer :returns: seconds in the future when the signed URL will expire """
if not isinstance(expiration, _EXPIRATION_TYPES): raise TypeError( "Expected an integer timestamp, datetime, or " "timedelta. Got %s" % type(expiration) ) now = NOW().replace(tzinfo=_helpers.UTC) if isinstance(expiration, six.integer_types): seconds = expiration if isinstance(expiration, datetime.datetime): if expiration.tzinfo is None: expiration = expiration.replace(tzinfo=_helpers.UTC) expiration = expiration - now if isinstance(expiration, datetime.timedelta): seconds = int(expiration.total_seconds()) if seconds > SEVEN_DAYS: raise ValueError( "Max allowed expiration interval is seven days ({:d} seconds)".format( SEVEN_DAYS ) ) return seconds
<SYSTEM_TASK:> Canonicalize headers for signing. <END_TASK> <USER_TASK:> Description: def get_canonical_headers(headers): """Canonicalize headers for signing. See: https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers :type headers: Union[dict|List(Tuple(str,str))] :param headers: (Optional) Additional HTTP headers to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers Requests using the signed URL *must* pass the specified header (name and value) with each request for the URL. :rtype: Tuple[List[str], List[Tuple[str, str]]] :returns: The canonical header lines and the ordered ``(name, value)`` pairs, normalized / sorted per the URL referenced above. """
if headers is None: headers = [] elif isinstance(headers, dict): headers = list(headers.items()) if not headers: return [], [] normalized = collections.defaultdict(list) for key, val in headers: key = key.lower().strip() val = MULTIPLE_SPACES.sub(" ", val.strip()) normalized[key].append(val) ordered_headers = sorted((key, ",".join(val)) for key, val in normalized.items()) canonical_headers = ["{}:{}".format(*item) for item in ordered_headers] return canonical_headers, ordered_headers
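A worked example of the normalization above: duplicate names are merged, keys are lowercased, runs of whitespace collapse to one space, and the result is sorted by header name.

    canonical, ordered = get_canonical_headers(
        [
            ("X-Goog-Meta-Foo", "bar   baz"),
            ("Content-Type", "text/plain"),
            ("x-goog-meta-foo", "qux"),
        ]
    )
    assert canonical == ["content-type:text/plain", "x-goog-meta-foo:bar baz,qux"]
    assert ordered == [("content-type", "text/plain"), ("x-goog-meta-foo", "bar baz,qux")]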
<SYSTEM_TASK:> Canonicalize the request method, resource, query parameters, and headers. <END_TASK> <USER_TASK:> Description: def canonicalize(method, resource, query_parameters, headers): """Canonicalize the request method, resource, query parameters, and headers. :type method: str :param method: The HTTP verb that will be used when requesting the URL. Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the signature will additionally contain the `x-goog-resumable` header, and the method will be changed to POST. See the signed URL docs regarding this flow: https://cloud.google.com/storage/docs/access-control/signed-urls :type resource: str :param resource: A pointer to a specific resource (typically, ``/bucket-name/path/to/blob.txt``). :type query_parameters: dict :param query_parameters: (Optional) Additional query parameters to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers#query :type headers: Union[dict|List(Tuple(str,str))] :param headers: (Optional) Additional HTTP headers to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers Requests using the signed URL *must* pass the specified header (name and value) with each request for the URL. :rtype: :class:`_Canonical` :returns: Canonical method, resource, query_parameters, and headers. """
headers, _ = get_canonical_headers(headers) if method == "RESUMABLE": method = "POST" headers.append("x-goog-resumable:start") if query_parameters is None: return _Canonical(method, resource, [], headers) normalized_qp = sorted( (key.lower(), value and value.strip() or "") for key, value in query_parameters.items() ) encoded_qp = six.moves.urllib.parse.urlencode(normalized_qp) canonical_resource = "{}?{}".format(resource, encoded_qp) return _Canonical(method, canonical_resource, normalized_qp, headers)
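A worked example of the ``'RESUMABLE'`` rewrite: the method becomes POST plus the ``x-goog-resumable`` header, and query parameters are lower-cased, stripped, sorted, and folded into the resource.

    canonical = canonicalize("RESUMABLE", "/bucket/blob.txt", {"Generation": "123"}, None)
    assert canonical.method == "POST"
    assert canonical.headers == ["x-goog-resumable:start"]
    assert canonical.resource == "/bucket/blob.txt?generation=123"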
<SYSTEM_TASK:> Generate a V2 signed URL to provide query-string auth'n to a resource. <END_TASK> <USER_TASK:> Description: def generate_signed_url_v2( credentials, resource, expiration, api_access_endpoint="", method="GET", content_md5=None, content_type=None, response_type=None, response_disposition=None, generation=None, headers=None, query_parameters=None, ): """Generate a V2 signed URL to provide query-string auth'n to a resource. .. note:: Assumes ``credentials`` implements the :class:`google.auth.credentials.Signing` interface. Also assumes ``credentials`` has a ``service_account_email`` property which identifies the credentials. .. note:: If you are on Google Compute Engine, you can't generate a signed URL. Follow `Issue 922`_ for updates on this. If you'd like to be able to generate a signed URL from GCE, you can use a standard service account from a JSON file rather than a GCE service account. See headers `reference`_ for more details on optional arguments. .. _Issue 922: https://github.com/GoogleCloudPlatform/\ google-cloud-python/issues/922 .. _reference: https://cloud.google.com/storage/docs/reference-headers :type credentials: :class:`google.auth.credentials.Signing` :param credentials: Credentials object with an associated private key to sign text. :type resource: str :param resource: A pointer to a specific resource (typically, ``/bucket-name/path/to/blob.txt``). :type expiration: Union[Integer, datetime.datetime, datetime.timedelta] :param expiration: Point in time when the signed URL should expire. :type api_access_endpoint: str :param api_access_endpoint: Optional URI base. Defaults to empty string. :type method: str :param method: The HTTP verb that will be used when requesting the URL. Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the signature will additionally contain the `x-goog-resumable` header, and the method will be changed to POST. See the signed URL docs regarding this flow: https://cloud.google.com/storage/docs/access-control/signed-urls :type content_md5: str :param content_md5: (Optional) The MD5 hash of the object referenced by ``resource``. :type content_type: str :param content_type: (Optional) The content type of the object referenced by ``resource``. :type response_type: str :param response_type: (Optional) Content type of responses to requests for the signed URL. Used to over-ride the content type of the underlying resource. :type response_disposition: str :param response_disposition: (Optional) Content disposition of responses to requests for the signed URL. :type generation: str :param generation: (Optional) A value that indicates which generation of the resource to fetch. :type headers: Union[dict|List(Tuple(str,str))] :param headers: (Optional) Additional HTTP headers to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers Requests using the signed URL *must* pass the specified header (name and value) with each request for the URL. :type query_parameters: dict :param query_parameters: (Optional) Additional query parameters to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers#query :raises: :exc:`TypeError` when expiration is not a valid type. :raises: :exc:`AttributeError` if credentials is not an instance of :class:`google.auth.credentials.Signing`. :rtype: str :returns: A signed URL you can use to access the resource until expiration. """
expiration_stamp = get_expiration_seconds_v2(expiration) canonical = canonicalize(method, resource, query_parameters, headers) # Generate the string to sign. elements_to_sign = [ canonical.method, content_md5 or "", content_type or "", str(expiration_stamp), ] elements_to_sign.extend(canonical.headers) elements_to_sign.append(canonical.resource) string_to_sign = "\n".join(elements_to_sign) # Set the right query parameters. signed_query_params = get_signed_query_params_v2( credentials, expiration_stamp, string_to_sign ) if response_type is not None: signed_query_params["response-content-type"] = response_type if response_disposition is not None: signed_query_params["response-content-disposition"] = response_disposition if generation is not None: signed_query_params["generation"] = generation signed_query_params.update(canonical.query_parameters) sorted_signed_query_params = sorted(signed_query_params.items()) # Return the built URL. return "{endpoint}{resource}?{querystring}".format( endpoint=api_access_endpoint, resource=resource, querystring=six.moves.urllib.parse.urlencode(sorted_signed_query_params), )
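A hedged end-to-end sketch. The key file path and bucket/object names are illustrative, and any service-account credentials implementing ``google.auth.credentials.Signing`` should work here; this is a sketch under those assumptions, not the only supported flow.

    import datetime
    from google.oauth2 import service_account

    credentials = service_account.Credentials.from_service_account_file(
        "/path/to/key.json"  # hypothetical key file
    )
    url = generate_signed_url_v2(
        credentials,
        resource="/example-bucket/example-object.txt",
        expiration=datetime.timedelta(hours=1),
        api_access_endpoint="https://storage.googleapis.com",
    )
    # `url` can now be fetched without further authentication until it expires.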
<SYSTEM_TASK:> Generate a V4 signed URL to provide query-string auth'n to a resource. <END_TASK> <USER_TASK:> Description: def generate_signed_url_v4( credentials, resource, expiration, api_access_endpoint=DEFAULT_ENDPOINT, method="GET", content_md5=None, content_type=None, response_type=None, response_disposition=None, generation=None, headers=None, query_parameters=None, _request_timestamp=None, # for testing only ): """Generate a V4 signed URL to provide query-string auth'n to a resource. .. note:: Assumes ``credentials`` implements the :class:`google.auth.credentials.Signing` interface. Also assumes ``credentials`` has a ``service_account_email`` property which identifies the credentials. .. note:: If you are on Google Compute Engine, you can't generate a signed URL. Follow `Issue 922`_ for updates on this. If you'd like to be able to generate a signed URL from GCE, you can use a standard service account from a JSON file rather than a GCE service account. See headers `reference`_ for more details on optional arguments. .. _Issue 922: https://github.com/GoogleCloudPlatform/\ google-cloud-python/issues/922 .. _reference: https://cloud.google.com/storage/docs/reference-headers :type credentials: :class:`google.auth.credentials.Signing` :param credentials: Credentials object with an associated private key to sign text. :type resource: str :param resource: A pointer to a specific resource (typically, ``/bucket-name/path/to/blob.txt``). :type expiration: Union[Integer, datetime.datetime, datetime.timedelta] :param expiration: Point in time when the signed URL should expire. :type api_access_endpoint: str :param api_access_endpoint: Optional URI base. Defaults to "https://storage.googleapis.com/" :type method: str :param method: The HTTP verb that will be used when requesting the URL. Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the signature will additionally contain the `x-goog-resumable` header, and the method will be changed to POST. See the signed URL docs regarding this flow: https://cloud.google.com/storage/docs/access-control/signed-urls :type content_md5: str :param content_md5: (Optional) The MD5 hash of the object referenced by ``resource``. :type content_type: str :param content_type: (Optional) The content type of the object referenced by ``resource``. :type response_type: str :param response_type: (Optional) Content type of responses to requests for the signed URL. Used to over-ride the content type of the underlying resource. :type response_disposition: str :param response_disposition: (Optional) Content disposition of responses to requests for the signed URL. :type generation: str :param generation: (Optional) A value that indicates which generation of the resource to fetch. :type headers: dict :param headers: (Optional) Additional HTTP headers to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers Requests using the signed URL *must* pass the specified header (name and value) with each request for the URL. :type query_parameters: dict :param query_parameters: (Optional) Additional query parameters to be included as part of the signed URLs. See: https://cloud.google.com/storage/docs/xml-api/reference-headers#query :raises: :exc:`TypeError` when expiration is not a valid type. :raises: :exc:`AttributeError` if credentials is not an instance of :class:`google.auth.credentials.Signing`. :rtype: str :returns: A signed URL you can use to access the resource until expiration. """
ensure_signed_credentials(credentials) expiration_seconds = get_expiration_seconds_v4(expiration) if _request_timestamp is None: now = NOW() request_timestamp = now.strftime("%Y%m%dT%H%M%SZ") datestamp = now.date().strftime("%Y%m%d") else: request_timestamp = _request_timestamp datestamp = _request_timestamp[:8] client_email = credentials.signer_email credential_scope = "{}/auto/storage/goog4_request".format(datestamp) credential = "{}/{}".format(client_email, credential_scope) if headers is None: headers = {} if content_type is not None: headers["Content-Type"] = content_type if content_md5 is not None: headers["Content-MD5"] = content_md5 header_names = [key.lower() for key in headers] if "host" not in header_names: headers["Host"] = "storage.googleapis.com" if method.upper() == "RESUMABLE": method = "POST" headers["x-goog-resumable"] = "start" canonical_headers, ordered_headers = get_canonical_headers(headers) canonical_header_string = ( "\n".join(canonical_headers) + "\n" ) # Yes, Virginia, the extra newline is part of the spec. signed_headers = ";".join([key for key, _ in ordered_headers]) if query_parameters is None: query_parameters = {} else: query_parameters = {key: value or "" for key, value in query_parameters.items()} query_parameters["X-Goog-Algorithm"] = "GOOG4-RSA-SHA256" query_parameters["X-Goog-Credential"] = credential query_parameters["X-Goog-Date"] = request_timestamp query_parameters["X-Goog-Expires"] = expiration_seconds query_parameters["X-Goog-SignedHeaders"] = signed_headers if response_type is not None: query_parameters["response-content-type"] = response_type if response_disposition is not None: query_parameters["response-content-disposition"] = response_disposition if generation is not None: query_parameters["generation"] = generation ordered_query_parameters = sorted(query_parameters.items()) canonical_query_string = six.moves.urllib.parse.urlencode(ordered_query_parameters) canonical_elements = [ method, resource, canonical_query_string, canonical_header_string, signed_headers, "UNSIGNED-PAYLOAD", ] canonical_request = "\n".join(canonical_elements) canonical_request_hash = hashlib.sha256( canonical_request.encode("ascii") ).hexdigest() string_elements = [ "GOOG4-RSA-SHA256", request_timestamp, credential_scope, canonical_request_hash, ] string_to_sign = "\n".join(string_elements) signature_bytes = credentials.sign_bytes(string_to_sign.encode("ascii")) signature = binascii.hexlify(signature_bytes).decode("ascii") return "{}{}?{}&X-Goog-Signature={}".format( api_access_endpoint, resource, canonical_query_string, signature )
<SYSTEM_TASK:> Return a fully-qualified glossary string. <END_TASK> <USER_TASK:> Description: def glossary_path(cls, project, location, glossary): """Return a fully-qualified glossary string."""
return google.api_core.path_template.expand( "projects/{project}/locations/{location}/glossaries/{glossary}", project=project, location=location, glossary=glossary, )
<SYSTEM_TASK:> Translates input text and returns translated text. <END_TASK> <USER_TASK:> Description: def translate_text( self, contents, target_language_code, mime_type=None, source_language_code=None, parent=None, model=None, glossary_config=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Translates input text and returns translated text. Example: >>> from google.cloud import translate_v3beta1 >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> >>> # TODO: Initialize `contents`: >>> contents = [] >>> >>> # TODO: Initialize `target_language_code`: >>> target_language_code = '' >>> >>> response = client.translate_text(contents, target_language_code) Args: contents (list[str]): Required. The content of the input in string format. We recommend the total contents to be less than 30k codepoints. Please use BatchTranslateText for larger text. target_language_code (str): Required. The BCP-47 language code to use for translation of the input text, set to one of the language codes listed in Language Support. mime_type (str): Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type is assumed to be "text/html". source_language_code (str): Optional. The BCP-47 language code of the input text if known, for example, "en-US" or "sr-Latn". Supported language codes are listed in Language Support. If the source language isn't specified, the API attempts to identify the source language automatically and returns the source language within the response. parent (str): Optional. Only used when making regionalized call. Format: projects/{project-id}/locations/{location-id}. Only custom model/glossary within the same location-id can be used. Otherwise 400 is returned. model (str): Optional. The ``model`` type requested for this translation. The format depends on model type: 1. Custom models: projects/{project-id}/locations/{location-id}/models/{model-id}. 2. General (built-in) models: projects/{project-id}/locations/{location-id}/models/general/nmt projects/{project-id}/locations/{location-id}/models/general/base For global (non-regionalized) requests, use {location-id} 'global'. For example, projects/{project-id}/locations/global/models/general/nmt If missing, the system decides which google base model to use. glossary_config (Union[dict, ~google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig]): Optional. Glossary to be applied. The glossary needs to be in the same region as the model, otherwise an INVALID\_ARGUMENT error is returned. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.translate_v3beta1.types.TranslateTextResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "translate_text" not in self._inner_api_calls: self._inner_api_calls[ "translate_text" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.translate_text, default_retry=self._method_configs["TranslateText"].retry, default_timeout=self._method_configs["TranslateText"].timeout, client_info=self._client_info, ) request = translation_service_pb2.TranslateTextRequest( contents=contents, target_language_code=target_language_code, mime_type=mime_type, source_language_code=source_language_code, parent=parent, model=model, glossary_config=glossary_config, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["translate_text"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Detects the language of text within a request. <END_TASK> <USER_TASK:> Description: def detect_language( self, parent=None, model=None, content=None, mime_type=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Detects the language of text within a request. Example: >>> from google.cloud import translate_v3beta1 >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> >>> response = client.detect_language() Args: parent (str): Optional. Only used when making regionalized call. Format: projects/{project-id}/locations/{location-id}. Only custom model within the same location-id can be used. Otherwise 400 is returned. model (str): Optional. The language detection model to be used. projects/{project-id}/locations/{location-id}/models/language-detection/{model-id} If not specified, default will be used. content (str): The content of the input stored as a string. mime_type (str): Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type is assumed to be "text/html". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.translate_v3beta1.types.DetectLanguageResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "detect_language" not in self._inner_api_calls: self._inner_api_calls[ "detect_language" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.detect_language, default_retry=self._method_configs["DetectLanguage"].retry, default_timeout=self._method_configs["DetectLanguage"].timeout, client_info=self._client_info, ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof(content=content) request = translation_service_pb2.DetectLanguageRequest( parent=parent, model=model, content=content, mime_type=mime_type ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["detect_language"]( request, retry=retry, timeout=timeout, metadata=metadata )
<SYSTEM_TASK:> Returns a list of supported languages for translation. <END_TASK> <USER_TASK:> Description: def get_supported_languages( self, parent=None, display_language_code=None, model=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Returns a list of supported languages for translation. Example: >>> from google.cloud import translate_v3beta1 >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> >>> response = client.get_supported_languages() Args: parent (str): Optional. Used for making regionalized calls. Format: projects/{project-id}/locations/{location-id}. For global calls, use projects/{project-id}/locations/global. If missing, the call is treated as a global call. Only a custom model within the same location-id can be used; otherwise a 400 error is returned. display_language_code (str): Optional. The language to use to return localized, human-readable names of supported languages. If missing, the default language is ENGLISH. model (str): Optional. Get supported languages of this model. The format depends on model type: 1. Custom models: projects/{project-id}/locations/{location-id}/models/{model-id}. 2. General (built-in) models: projects/{project-id}/locations/{location-id}/models/general/nmt projects/{project-id}/locations/{location-id}/models/general/base Returns languages supported by the specified model. If missing, the supported languages of the Google general NMT model are returned. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.translate_v3beta1.types.SupportedLanguages` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "get_supported_languages" not in self._inner_api_calls: self._inner_api_calls[ "get_supported_languages" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_supported_languages, default_retry=self._method_configs["GetSupportedLanguages"].retry, default_timeout=self._method_configs["GetSupportedLanguages"].timeout, client_info=self._client_info, ) request = translation_service_pb2.GetSupportedLanguagesRequest( parent=parent, display_language_code=display_language_code, model=model ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["get_supported_languages"]( request, retry=retry, timeout=timeout, metadata=metadata )
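A short, hedged usage sketch for ``get_supported_languages``; ``display_language_code`` is optional and shown here only to illustrate localized display names:

# Usage sketch (assumes default credentials are configured).
from google.cloud import translate_v3beta1

client = translate_v3beta1.TranslationServiceClient()

response = client.get_supported_languages(display_language_code="en")
for language in response.languages:
    print(language.language_code, language.display_name)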
<SYSTEM_TASK:> Creates a glossary and returns the long-running operation. Returns <END_TASK> <USER_TASK:> Description: def create_glossary( self, parent, glossary, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a glossary and returns the long-running operation. Returns NOT\_FOUND if the project doesn't exist. Example: >>> from google.cloud import translate_v3beta1 >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> >>> # TODO: Initialize `glossary`: >>> glossary = {} >>> >>> response = client.create_glossary(parent, glossary) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The project name. glossary (Union[dict, ~google.cloud.translate_v3beta1.types.Glossary]): Required. The glossary to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.Glossary` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.translate_v3beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "create_glossary" not in self._inner_api_calls: self._inner_api_calls[ "create_glossary" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_glossary, default_retry=self._method_configs["CreateGlossary"].retry, default_timeout=self._method_configs["CreateGlossary"].timeout, client_info=self._client_info, ) request = translation_service_pb2.CreateGlossaryRequest( parent=parent, glossary=glossary ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["create_glossary"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, translation_service_pb2.Glossary, metadata_type=translation_service_pb2.CreateGlossaryMetadata, )
<SYSTEM_TASK:> Deletes a glossary, or cancels glossary construction if the glossary <END_TASK> <USER_TASK:> Description: def delete_glossary( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes a glossary, or cancels glossary construction if the glossary isn't created yet. Returns NOT\_FOUND if the glossary doesn't exist. Example: >>> from google.cloud import translate_v3beta1 >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> >>> name = client.glossary_path('[PROJECT]', '[LOCATION]', '[GLOSSARY]') >>> >>> response = client.delete_glossary(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. The name of the glossary to delete. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.translate_v3beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "delete_glossary" not in self._inner_api_calls: self._inner_api_calls[ "delete_glossary" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_glossary, default_retry=self._method_configs["DeleteGlossary"].retry, default_timeout=self._method_configs["DeleteGlossary"].timeout, client_info=self._client_info, ) request = translation_service_pb2.DeleteGlossaryRequest(name=name) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["delete_glossary"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, translation_service_pb2.DeleteGlossaryResponse, metadata_type=translation_service_pb2.DeleteGlossaryMetadata, )
<SYSTEM_TASK:> Return a copy of time_series with the points removed. <END_TASK> <USER_TASK:> Description: def _extract_header(time_series): """Return a copy of time_series with the points removed."""
return TimeSeries( metric=time_series.metric, resource=time_series.resource, metric_kind=time_series.metric_kind, value_type=time_series.value_type, )
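A small illustrative check of what this helper produces; a hypothetical sketch, not part of the source:

# Hypothetical illustration: the returned header keeps the identifying
# fields of the series but carries no data points.
header = _extract_header(time_series)
assert header.metric == time_series.metric
assert header.resource == time_series.resource
assert not header.points  # points were deliberately omitted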
<SYSTEM_TASK:> Build the combined resource and metric labels, with resource_type. <END_TASK> <USER_TASK:> Description: def _extract_labels(time_series): """Build the combined resource and metric labels, with resource_type."""
labels = {"resource_type": time_series.resource.type} labels.update(time_series.resource.labels) labels.update(time_series.metric.labels) return labels
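For orientation, a hypothetical example of the combined label dict; the concrete keys depend on the monitored resource and metric types:

# Hypothetical example for a GCE-instance CPU metric; key names are
# illustrative, not taken from the source.
labels = _extract_labels(time_series)
# e.g. {'resource_type': 'gce_instance',
#       'project_id': 'my-project',
#       'zone': 'us-central1-a',
#       'instance_id': '1234567890123456789',
#       'instance_name': 'backend-1'}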
<SYSTEM_TASK:> Sort label names, putting well-known resource labels first. <END_TASK> <USER_TASK:> Description: def _sorted_resource_labels(labels): """Sort label names, putting well-known resource labels first."""
head = [label for label in TOP_RESOURCE_LABELS if label in labels] tail = sorted(label for label in labels if label not in TOP_RESOURCE_LABELS) return head + tail
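A quick illustrative call, assuming ``TOP_RESOURCE_LABELS`` is a tuple such as ``('project_id', 'aws_account', 'location', 'region', 'zone')``:

# Well-known labels come first in TOP_RESOURCE_LABELS order; the rest
# follow alphabetically.
print(_sorted_resource_labels(['zone', 'instance_id', 'project_id']))
# -> ['project_id', 'zone', 'instance_id']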
<SYSTEM_TASK:> Starts a thread and marks it as a daemon thread. <END_TASK> <USER_TASK:> Description: def start_daemon_thread(*args, **kwargs): """Starts a thread and marks it as a daemon thread."""
thread = threading.Thread(*args, **kwargs) thread.daemon = True thread.start() return thread
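A brief usage sketch: a background poller that will not keep the process alive at interpreter shutdown:

import time

def _poll_forever():
    while True:
        time.sleep(1)  # stand-in for real periodic work

# Daemon threads are reaped automatically when the main thread exits.
thread = start_daemon_thread(target=_poll_forever, name="poller")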
<SYSTEM_TASK:> Invoke a callback, swallowing and logging any exceptions. <END_TASK> <USER_TASK:> Description: def safe_invoke_callback(callback, *args, **kwargs): """Invoke a callback, swallowing and logging any exceptions."""
# pylint: disable=bare-except # We intentionally want to swallow all exceptions. try: return callback(*args, **kwargs) except Exception: _LOGGER.exception("Error while executing Future callback.")
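A usage sketch showing why the swallowing matters: one faulty callback cannot prevent the others from running:

# The ZeroDivisionError from the second callback is logged, not raised,
# so the first callback still fires.
callbacks = [lambda result: print(result), lambda result: 1 / 0]
for callback in callbacks:
    safe_invoke_callback(callback, "done")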
<SYSTEM_TASK:> Create a project bound to the current client. <END_TASK> <USER_TASK:> Description: def new_project(self, project_id, name=None, labels=None): """Create a project bound to the current client. Use :meth:`Project.reload() \ <google.cloud.resource_manager.project.Project.reload>` to retrieve project metadata after creating a :class:`~google.cloud.resource_manager.project.Project` instance. .. note:: This does not make an API call. :type project_id: str :param project_id: The ID for this project. :type name: str :param name: The display name of the project. :type labels: dict :param labels: A dictionary of labels associated with the project. :rtype: :class:`~google.cloud.resource_manager.project.Project` :returns: A new instance of a :class:`~google.cloud.resource_manager.project.Project` **without** any metadata loaded. """
return Project(project_id=project_id, client=self, name=name, labels=labels)
<SYSTEM_TASK:> Fetch an existing project and its relevant metadata by ID. <END_TASK> <USER_TASK:> Description: def fetch_project(self, project_id): """Fetch an existing project and its relevant metadata by ID. .. note:: If the project does not exist, this will raise a :class:`NotFound <google.cloud.exceptions.NotFound>` error. :type project_id: str :param project_id: The ID for this project. :rtype: :class:`~google.cloud.resource_manager.project.Project` :returns: A :class:`~google.cloud.resource_manager.project.Project` with metadata fetched from the API. """
project = self.new_project(project_id) project.reload() return project
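A minimal usage sketch; the project ID is a placeholder:

from google.cloud import resource_manager

client = resource_manager.Client()
project = client.fetch_project("purple-spaceship-123")  # hypothetical ID
print(project.name, project.labels)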
<SYSTEM_TASK:> List the projects visible to this client. <END_TASK> <USER_TASK:> Description: def list_projects(self, filter_params=None, page_size=None): """List the projects visible to this client. Example:: >>> from google.cloud import resource_manager >>> client = resource_manager.Client() >>> for project in client.list_projects(): ... print(project.project_id) List all projects with label ``'environment'`` set to ``'prod'`` (filtering by labels):: >>> from google.cloud import resource_manager >>> client = resource_manager.Client() >>> env_filter = {'labels.environment': 'prod'} >>> for project in client.list_projects(env_filter): ... print(project.project_id) See https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/list Complete filtering example:: >>> project_filter = { # Return projects with... ... 'name': 'My Project', # name set to 'My Project'. ... 'id': 'my-project-id', # id set to 'my-project-id'. ... 'labels.stage': 'prod', # the label 'stage' set to 'prod' ... 'labels.color': '*' # a label 'color' set to anything. ... } >>> client.list_projects(project_filter) :type filter_params: dict :param filter_params: (Optional) A dictionary of filter options where each key is a property to filter on, and each value is the (case-insensitive) value to check (or the glob ``*`` to check for existence of the property). See the example above for more details. :type page_size: int :param page_size: (Optional) The maximum number of projects in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of all :class:`~google.cloud.resource_manager.project.Project` that the current user has access to. """
extra_params = {} if page_size is not None: extra_params["pageSize"] = page_size if filter_params is not None: extra_params["filter"] = [ "{}:{}".format(key, value) for key, value in six.iteritems(filter_params) ] return page_iterator.HTTPIterator( client=self, api_request=self._connection.api_request, path="/projects", item_to_value=_item_to_project, items_key="projects", extra_params=extra_params, )
<SYSTEM_TASK:> Like ``ExecuteSql``, except returns the result set as a stream. Unlike <END_TASK> <USER_TASK:> Description: def execute_streaming_sql( self, session, sql, transaction=None, params=None, param_types=None, resume_token=None, query_mode=None, partition_token=None, seqno=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Like ``ExecuteSql``, except returns the result set as a stream. Unlike ``ExecuteSql``, there is no limit on the size of the returned result set. However, no individual row in the result set can exceed 100 MiB, and no column value can exceed 10 MiB. Example: >>> from google.cloud import spanner_v1 >>> >>> client = spanner_v1.SpannerClient() >>> >>> session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]', '[SESSION]') >>> >>> # TODO: Initialize `sql`: >>> sql = '' >>> >>> for element in client.execute_streaming_sql(session, sql): ... # process element ... pass Args: session (str): Required. The session in which the SQL query should be performed. sql (str): Required. The SQL string. transaction (Union[dict, ~google.cloud.spanner_v1.types.TransactionSelector]): The transaction to use. For queries, if none is provided, the default is a temporary read-only transaction with strong concurrency. Standard DML statements require a ReadWrite transaction. Single-use transactions are not supported (to avoid replay). The caller must either supply an existing transaction ID or begin a new transaction. Partitioned DML requires an existing PartitionedDml transaction ID. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.spanner_v1.types.TransactionSelector` params (Union[dict, ~google.cloud.spanner_v1.types.Struct]): The SQL string can contain parameter placeholders. A parameter placeholder consists of ``'@'`` followed by the parameter name. Parameter names consist of any combination of letters, numbers, and underscores. Parameters can appear anywhere that a literal value is expected. The same parameter name can be used more than once, for example: ``"WHERE id > @msg_id AND id < @msg_id + 100"`` It is an error to execute an SQL statement with unbound parameters. Parameter values are specified using ``params``, which is a JSON object whose keys are parameter names, and whose values are the corresponding parameter values. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.spanner_v1.types.Struct` param_types (dict[str -> Union[dict, ~google.cloud.spanner_v1.types.Type]]): It is not always possible for Cloud Spanner to infer the right SQL type from a JSON value. For example, values of type ``BYTES`` and values of type ``STRING`` both appear in ``params`` as JSON strings. In these cases, ``param_types`` can be used to specify the exact SQL type for some or all of the SQL statement parameters. See the definition of ``Type`` for more information about SQL types. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.spanner_v1.types.Type` resume_token (bytes): If this request is resuming a previously interrupted SQL statement execution, ``resume_token`` should be copied from the last ``PartialResultSet`` yielded before the interruption. Doing this enables the new SQL statement execution to resume where the last one left off. 
The rest of the request parameters must exactly match the request that yielded this token. query_mode (~google.cloud.spanner_v1.types.QueryMode): Used to control the amount of debugging information returned in ``ResultSetStats``. If ``partition_token`` is set, ``query_mode`` can only be set to ``QueryMode.NORMAL``. partition_token (bytes): If present, results will be restricted to the specified partition previously created using PartitionQuery(). There must be an exact match for the values of fields common to this message and the PartitionQueryRequest message used to create this partition\_token. seqno (long): A per-transaction sequence number used to identify this request. This makes each request idempotent such that if the request is received multiple times, at most one will succeed. The sequence number must be monotonically increasing within the transaction. If a request arrives for the first time with an out-of-order sequence number, the transaction may be aborted. Replays of previously handled requests will yield the same response as the first execution. Required for DML statements. Ignored for queries. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: Iterable[~google.cloud.spanner_v1.types.PartialResultSet]. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """
# Wrap the transport method to add retry and timeout logic. if "execute_streaming_sql" not in self._inner_api_calls: self._inner_api_calls[ "execute_streaming_sql" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.execute_streaming_sql, default_retry=self._method_configs["ExecuteStreamingSql"].retry, default_timeout=self._method_configs["ExecuteStreamingSql"].timeout, client_info=self._client_info, ) request = spanner_pb2.ExecuteSqlRequest( session=session, sql=sql, transaction=transaction, params=params, param_types=param_types, resume_token=resume_token, query_mode=query_mode, partition_token=partition_token, seqno=seqno, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("session", session)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["execute_streaming_sql"]( request, retry=retry, timeout=timeout, metadata=metadata )
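A hedged sketch of the resumption pattern the docstring describes, using ``resume_token`` to pick up an interrupted stream; ``process()`` is a hypothetical consumer and the error handling is deliberately simplified:

from google.api_core import exceptions

resume_token = b""
while True:
    try:
        for partial in client.execute_streaming_sql(
            session, sql, resume_token=resume_token
        ):
            if partial.resume_token:
                resume_token = partial.resume_token  # checkpoint
            process(partial)  # hypothetical consumer of each PartialResultSet
        break  # stream completed
    except exceptions.ServiceUnavailable:
        continue  # retry, resuming from the last checkpoint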