<SYSTEM_TASK:>
Return a fully-qualified asset string.
<END_TASK>
<USER_TASK:>
Description:
def asset_path(cls, organization, asset):
"""Return a fully-qualified asset string.""" |
return google.api_core.path_template.expand(
"organizations/{organization}/assets/{asset}",
organization=organization,
asset=asset,
) |
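The ``expand`` helper substitutes the keyword arguments into the template's placeholders. A minimal sketch of the round trip (the organization and asset IDs below are illustrative):

from google.api_core import path_template

path = path_template.expand(
    "organizations/{organization}/assets/{asset}",
    organization="123",
    asset="456",
)
assert path == "organizations/123/assets/456"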
<SYSTEM_TASK:>
Return a fully-qualified asset_security_marks string.
<END_TASK>
<USER_TASK:>
Description:
def asset_security_marks_path(cls, organization, asset):
"""Return a fully-qualified asset_security_marks string.""" |
return google.api_core.path_template.expand(
"organizations/{organization}/assets/{asset}/securityMarks",
organization=organization,
asset=asset,
) |
<SYSTEM_TASK:>
Creates a source.
<END_TASK>
<USER_TASK:>
Description:
def create_source(
self,
parent,
source,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a source.
Example:
>>> from google.cloud import securitycenter_v1
>>>
>>> client = securitycenter_v1.SecurityCenterClient()
>>>
>>> parent = client.organization_path('[ORGANIZATION]')
>>>
>>> # TODO: Initialize `source`:
>>> source = {}
>>>
>>> response = client.create_source(parent, source)
Args:
parent (str): Resource name of the new source's parent. Its format should be
"organizations/[organization\_id]".
source (Union[dict, ~google.cloud.securitycenter_v1.types.Source]): The Source being created, only the display\_name and description will be
used. All other fields will be ignored.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1.types.Source`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.securitycenter_v1.types.Source` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_source" not in self._inner_api_calls:
self._inner_api_calls[
"create_source"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_source,
default_retry=self._method_configs["CreateSource"].retry,
default_timeout=self._method_configs["CreateSource"].timeout,
client_info=self._client_info,
)
request = securitycenter_service_pb2.CreateSourceRequest(
parent=parent, source=source
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_source"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
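The routing-header block above is worth unpacking: ``to_grpc_metadata`` URL-encodes the ``(key, value)`` pairs into the ``x-goog-request-params`` metadata entry that tells the backend how to route the request. A hedged sketch of what it produces (the organization ID is illustrative, and the exact encoding shown is an expectation, not a guarantee):

from google.api_core.gapic_v1 import routing_header

entry = routing_header.to_grpc_metadata([("parent", "organizations/123")])
# Expected to be roughly:
# ("x-goog-request-params", "parent=organizations%2F123")
print(entry)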
<SYSTEM_TASK:>
Creates a finding. The corresponding source must exist for finding creation
<END_TASK>
<USER_TASK:>
Description:
def create_finding(
self,
parent,
finding_id,
finding,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a finding. The corresponding source must exist for finding creation
to succeed.
Example:
>>> from google.cloud import securitycenter_v1
>>>
>>> client = securitycenter_v1.SecurityCenterClient()
>>>
>>> parent = client.source_path('[ORGANIZATION]', '[SOURCE]')
>>>
>>> # TODO: Initialize `finding_id`:
>>> finding_id = ''
>>>
>>> # TODO: Initialize `finding`:
>>> finding = {}
>>>
>>> response = client.create_finding(parent, finding_id, finding)
Args:
parent (str): Resource name of the new finding's parent. Its format should be
"organizations/[organization\_id]/sources/[source\_id]".
finding_id (str): Unique identifier provided by the client within the parent scope.
It must be alphanumeric and less than or equal to 32 characters and
greater than 0 characters in length.
finding (Union[dict, ~google.cloud.securitycenter_v1.types.Finding]): The Finding being created. The name and security\_marks will be ignored
as they are both output only fields on this resource.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1.types.Finding`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.securitycenter_v1.types.Finding` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_finding" not in self._inner_api_calls:
self._inner_api_calls[
"create_finding"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_finding,
default_retry=self._method_configs["CreateFinding"].retry,
default_timeout=self._method_configs["CreateFinding"].timeout,
client_info=self._client_info,
)
request = securitycenter_service_pb2.CreateFindingRequest(
parent=parent, finding_id=finding_id, finding=finding
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_finding"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Runs asset discovery. The discovery is tracked with a long-running
<END_TASK>
<USER_TASK:>
Description:
def run_asset_discovery(
self,
parent,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Runs asset discovery. The discovery is tracked with a long-running
operation.
This API can only be called with limited frequency for an organization.
If it is called too frequently the caller will receive a
TOO\_MANY\_REQUESTS error.
Example:
>>> from google.cloud import securitycenter_v1
>>>
>>> client = securitycenter_v1.SecurityCenterClient()
>>>
>>> parent = client.organization_path('[ORGANIZATION]')
>>>
>>> response = client.run_asset_discovery(parent)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Name of the organization to run asset discovery for. Its format is
"organizations/[organization\_id]".
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.securitycenter_v1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "run_asset_discovery" not in self._inner_api_calls:
self._inner_api_calls[
"run_asset_discovery"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.run_asset_discovery,
default_retry=self._method_configs["RunAssetDiscovery"].retry,
default_timeout=self._method_configs["RunAssetDiscovery"].timeout,
client_info=self._client_info,
)
request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["run_asset_discovery"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
empty_pb2.Empty,
metadata_type=empty_pb2.Empty,
) |
<SYSTEM_TASK:>
Updates security marks.
<END_TASK>
<USER_TASK:>
Description:
def update_security_marks(
self,
security_marks,
update_mask=None,
start_time=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates security marks.
Example:
>>> from google.cloud import securitycenter_v1
>>>
>>> client = securitycenter_v1.SecurityCenterClient()
>>>
>>> # TODO: Initialize `security_marks`:
>>> security_marks = {}
>>>
>>> response = client.update_security_marks(security_marks)
Args:
security_marks (Union[dict, ~google.cloud.securitycenter_v1.types.SecurityMarks]): The security marks resource to update.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1.types.SecurityMarks`
update_mask (Union[dict, ~google.cloud.securitycenter_v1.types.FieldMask]): The FieldMask to use when updating the security marks resource.
The field mask must not contain duplicate fields. If empty or set to
"marks", all marks will be replaced. Individual marks can be updated
using "marks.<mark\_key>".
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1.types.FieldMask`
start_time (Union[dict, ~google.cloud.securitycenter_v1.types.Timestamp]): The time at which the updated SecurityMarks take effect.
If not set, uses current server time. Updates will be applied to the
SecurityMarks that are active immediately preceding this time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1.types.Timestamp`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.securitycenter_v1.types.SecurityMarks` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "update_security_marks" not in self._inner_api_calls:
self._inner_api_calls[
"update_security_marks"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_security_marks,
default_retry=self._method_configs["UpdateSecurityMarks"].retry,
default_timeout=self._method_configs["UpdateSecurityMarks"].timeout,
client_info=self._client_info,
)
request = securitycenter_service_pb2.UpdateSecurityMarksRequest(
security_marks=security_marks,
update_mask=update_mask,
start_time=start_time,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("security_marks.name", security_marks.name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["update_security_marks"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
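To make the ``update_mask`` semantics concrete, here is a hedged sketch of updating one individual mark via the ``marks.<mark_key>`` form described above (the resource name and mark key are illustrative):

security_marks = {
    "name": "organizations/123/assets/456/securityMarks",
    "marks": {"env": "prod"},
}
update_mask = {"paths": ["marks.env"]}  # replaces only the "env" mark
response = client.update_security_marks(security_marks, update_mask=update_mask)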
<SYSTEM_TASK:>
Add row range to row_ranges list from the row keys
<END_TASK>
<USER_TASK:>
Description:
def add_row_range_from_keys(
self, start_key=None, end_key=None, start_inclusive=True, end_inclusive=False
):
"""Add row range to row_ranges list from the row keys
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_row_range_from_keys]
:end-before: [END bigtable_row_range_from_keys]
:type start_key: bytes
:param start_key: (Optional) Start key of the row range. If left empty,
will be interpreted as the empty string.
:type end_key: bytes
:param end_key: (Optional) End key of the row range. If left empty,
will be interpreted as the empty string and range will
be unbounded on the high end.
:type start_inclusive: bool
:param start_inclusive: (Optional) Whether the ``start_key`` should be
considered inclusive. The default is True (inclusive).
:type end_inclusive: bool
:param end_inclusive: (Optional) Whether the ``end_key`` should be
considered inclusive. The default is False (exclusive).
""" |
row_range = RowRange(start_key, end_key, start_inclusive, end_inclusive)
self.row_ranges.append(row_range) |
<SYSTEM_TASK:>
Add row keys and row range to given request message
<END_TASK>
<USER_TASK:>
Description:
def _update_message_request(self, message):
"""Add row keys and row range to given request message
:type message: class:`data_messages_v2_pb2.ReadRowsRequest`
:param message: The ``ReadRowsRequest`` protobuf
""" |
for each in self.row_keys:
message.rows.row_keys.append(_to_bytes(each))
for each in self.row_ranges:
r_kwargs = each.get_range_kwargs()
message.rows.row_ranges.add(**r_kwargs) |
<SYSTEM_TASK:>
Convert row range object to dict which can be passed to
<END_TASK>
<USER_TASK:>
Description:
def get_range_kwargs(self):
""" Convert row range object to dict which can be passed to
google.bigtable.v2.RowRange add method.
""" |
range_kwargs = {}
if self.start_key is not None:
start_key_key = "start_key_open"
if self.start_inclusive:
start_key_key = "start_key_closed"
range_kwargs[start_key_key] = _to_bytes(self.start_key)
if self.end_key is not None:
end_key_key = "end_key_open"
if self.end_inclusive:
end_key_key = "end_key_closed"
range_kwargs[end_key_key] = _to_bytes(self.end_key)
return range_kwargs |
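A quick sketch of the open/closed mapping, assuming the ``RowRange`` constructor shown earlier in this section:

rng = RowRange(b"row-a", b"row-z", start_inclusive=True, end_inclusive=False)
assert rng.get_range_kwargs() == {
    "start_key_closed": b"row-a",  # inclusive start
    "end_key_open": b"row-z",      # exclusive end
}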
<SYSTEM_TASK:>
Builds the Error Reporting object to report.
<END_TASK>
<USER_TASK:>
Description:
def _build_error_report(
self, message, report_location=None, http_context=None, user=None
):
"""Builds the Error Reporting object to report.
This builds the object according to
https://cloud.google.com/error-reporting/docs/formatting-error-messages
:type message: str
:param message: The stack trace that was reported or logged by the
service.
:type report_location: dict
:param report_location: The location in the source code where the
decision was made to report the error, usually the place
where it was logged. For a logged exception this would be the
source line where the exception is logged, usually close to
the place where it was caught.
This should be a Python dict that contains the keys 'filePath',
'lineNumber', and 'functionName'
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This can
be a user ID, an email address, or an arbitrary token that
uniquely identifies the user. When sending an error
report, leave this field empty if the user was not
logged in. In this case the Error Reporting system will
use other data, such as remote IP address,
to distinguish affected users.
:rtype: dict
:returns: A dict payload ready to be serialized to JSON and sent to
the API.
""" |
payload = {
"serviceContext": {"service": self.service},
"message": "{0}".format(message),
}
if self.version:
payload["serviceContext"]["version"] = self.version
if report_location or http_context or user:
payload["context"] = {}
if report_location:
payload["context"]["reportLocation"] = report_location
if http_context:
http_context_dict = http_context.__dict__
# strip out None values
payload["context"]["httpRequest"] = {
key: value
for key, value in six.iteritems(http_context_dict)
if value is not None
}
if user:
payload["context"]["user"] = user
return payload |
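For illustration, a hedged sketch of the payload shape this builder returns (all field values below are made up):

payload = client._build_error_report(
    "Traceback (most recent call last): ...",
    report_location={
        "filePath": "app.py",
        "lineNumber": 42,
        "functionName": "main",
    },
    user="user-123",
)
# payload should look like:
# {
#     "serviceContext": {"service": "...", "version": "..."},
#     "message": "Traceback (most recent call last): ...",
#     "context": {"reportLocation": {...}, "user": "user-123"},
# }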
<SYSTEM_TASK:>
Makes the call to the Error Reporting API.
<END_TASK>
<USER_TASK:>
Description:
def _send_error_report(
self, message, report_location=None, http_context=None, user=None
):
"""Makes the call to the Error Reporting API.
This is the lower-level interface to build and send the payload,
generally users will use either report() or report_exception() to
automatically gather the parameters for this method.
:type message: str
:param message: The stack trace that was reported or logged by the
service.
:type report_location: dict
:param report_location: The location in the source code where the
decision was made to report the error, usually the place
where it was logged. For a logged exception this would be the
source line where the exception is logged, usually close to
the place where it was caught.
This should be a Python dict that contains the keys 'filePath',
'lineNumber', and 'functionName'
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This can
be a user ID, an email address, or an arbitrary token that
uniquely identifies the user. When sending an error
report, leave this field empty if the user was not
logged in. In this case the Error Reporting system will
use other data, such as remote IP address,
to distinguish affected users.
""" |
error_report = self._build_error_report(
message, report_location, http_context, user
)
self.report_errors_api.report_error_event(error_report) |
<SYSTEM_TASK:>
Reports a message to Stackdriver Error Reporting
<END_TASK>
<USER_TASK:>
Description:
def report(self, message, http_context=None, user=None):
""" Reports a message to Stackdriver Error Reporting
https://cloud.google.com/error-reporting/docs/formatting-error-messages
:type message: str
:param message: A user-supplied message to report
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This
can be a user ID, an email address, or an arbitrary
token that uniquely identifies the user. When sending
an error report, leave this field empty if the user
was not logged in. In this case the Error Reporting
system will use other data, such as remote IP address,
to distinguish affected users.
Example:
.. code-block:: python
>>> client.report("Something went wrong!")
""" |
stack = traceback.extract_stack()
last_call = stack[-2]
file_path = last_call[0]
line_number = last_call[1]
function_name = last_call[2]
report_location = {
"filePath": file_path,
"lineNumber": line_number,
"functionName": function_name,
}
self._send_error_report(
message,
http_context=http_context,
user=user,
report_location=report_location,
) |
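The ``report_location`` is recovered by walking the call stack: ``traceback.extract_stack()[-2]`` is the frame of ``report()``'s caller, since the ``[-1]`` entry is the function doing the extracting. A self-contained sketch of the same trick:

import traceback

def caller_location():
    # [-1] is this function itself; [-2] is whoever called it.
    frame = traceback.extract_stack()[-2]
    return {
        "filePath": frame[0],
        "lineNumber": frame[1],
        "functionName": frame[2],
    }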
<SYSTEM_TASK:>
Reports the details of the latest exceptions to Stackdriver Error
<END_TASK>
<USER_TASK:>
Description:
def report_exception(self, http_context=None, user=None):
""" Reports the details of the latest exceptions to Stackdriver Error
Reporting.
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This
can be a user ID, an email address, or an arbitrary
token that uniquely identifies the user. When sending an
error report, leave this field empty if the user was
not logged in. In this case the Error Reporting system
will use other data, such as remote IP address,
to distinguish affected users.
Example::
>>> try:
>>> raise NameError
>>> except Exception:
>>> client.report_exception()
""" |
self._send_error_report(
traceback.format_exc(), http_context=http_context, user=user
) |
<SYSTEM_TASK:>
Return the topmost transaction.
<END_TASK>
<USER_TASK:>
Description:
def current(self):
"""Return the topmost transaction.
.. note::
If the topmost element on the stack is not a transaction,
returns None.
:rtype: :class:`google.cloud.datastore.transaction.Transaction` or None
:returns: The current transaction (if any are active).
""" |
top = super(Transaction, self).current()
if isinstance(top, Transaction):
return top |
<SYSTEM_TASK:>
Begins a transaction.
<END_TASK>
<USER_TASK:>
Description:
def begin(self):
"""Begins a transaction.
This method is called automatically when entering a with
statement, however it can be called explicitly if you don't want
to use a context manager.
:raises: :class:`~exceptions.ValueError` if the transaction has
already begun.
""" |
super(Transaction, self).begin()
try:
response_pb = self._client._datastore_api.begin_transaction(self.project)
self._id = response_pb.transaction
except: # noqa: E722 do not use bare except, specify exception instead
self._status = self._ABORTED
raise |
<SYSTEM_TASK:>
Rolls back the current transaction.
<END_TASK>
<USER_TASK:>
Description:
def rollback(self):
"""Rolls back the current transaction.
This method has necessary side-effects:
- Sets the current transaction's ID to None.
""" |
try:
# No need to use the response; it contains nothing.
self._client._datastore_api.rollback(self.project, self._id)
finally:
super(Transaction, self).rollback()
# Clear our own ID in case this gets accidentally reused.
self._id = None |
<SYSTEM_TASK:>
Adds an entity to be committed.
<END_TASK>
<USER_TASK:>
Description:
def put(self, entity):
"""Adds an entity to be committed.
Ensures the transaction is not marked readonly.
Please see documentation at
:meth:`~google.cloud.datastore.batch.Batch.put`
:type entity: :class:`~google.cloud.datastore.entity.Entity`
:param entity: the entity to be saved.
:raises: :class:`RuntimeError` if the transaction
is marked ReadOnly
""" |
if self._options.HasField("read_only"):
raise RuntimeError("Transaction is read only")
else:
super(Transaction, self).put(entity) |
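Taken together, ``begin``/``rollback``/``put`` are normally driven through the context-manager protocol rather than called directly. A hedged usage sketch (the entity kind and property are illustrative):

from google.cloud import datastore

client = datastore.Client()
with client.transaction() as xact:  # begin() on entry, commit()/rollback() on exit
    entity = datastore.Entity(key=client.key("Thing"))
    entity["name"] = "example"
    xact.put(entity)  # raises RuntimeError if the transaction is read-only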
<SYSTEM_TASK:>
Lists an organization or source's findings.
<END_TASK>
<USER_TASK:>
Description:
def list_findings(
self,
parent,
filter_=None,
order_by=None,
read_time=None,
field_mask=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists an organization or source's findings.
To list across all sources provide a ``-`` as the source id. Example:
/v1beta1/organizations/123/sources/-/findings
Example:
>>> from google.cloud import securitycenter_v1beta1
>>>
>>> client = securitycenter_v1beta1.SecurityCenterClient()
>>>
>>> parent = client.source_path('[ORGANIZATION]', '[SOURCE]')
>>>
>>> # Iterate over all results
>>> for element in client.list_findings(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_findings(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Name of the source the findings belong to. Its format is
"organizations/[organization\_id]/sources/[source\_id]". To list across
all sources provide a source\_id of ``-``. For example:
organizations/123/sources/-
filter_ (str): Expression that defines the filter to apply across findings. The
expression is a list of one or more restrictions combined via logical
operators ``AND`` and ``OR``. Parentheses are not supported, and ``OR``
has higher precedence than ``AND``.
Restrictions have the form ``<field> <operator> <value>`` and may have a
``-`` character in front of them to indicate negation. Examples include:
- name
- source\_properties.a\_property
- security\_marks.marks.marka
The supported operators are:
- ``=`` for all value types.
- ``>``, ``<``, ``>=``, ``<=`` for integer values.
- ``:``, meaning substring matching, for strings.
The supported value types are:
- string literals in quotes.
- integer literals without quotes.
- boolean literals ``true`` and ``false`` without quotes.
For example, ``source_properties.size = 100`` is a valid filter string.
order_by (str): Expression that defines what fields and order to use for sorting. The
string value should follow SQL syntax: comma separated list of fields.
For example: "name,resource\_properties.a\_property". The default
sorting order is ascending. To specify descending order for a field, a
suffix " desc" should be appended to the field name. For example: "name
desc,source\_properties.a\_property". Redundant space characters in the
syntax are insignificant. "name desc,source\_properties.a\_property" and
" name desc , source\_properties.a\_property " are equivalent.
read_time (Union[dict, ~google.cloud.securitycenter_v1beta1.types.Timestamp]): Time used as a reference point when filtering findings. The filter is
limited to findings existing at the supplied time and their values are
those at that specific time. Absence of this field will default to the
API's version of NOW.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1beta1.types.Timestamp`
field_mask (Union[dict, ~google.cloud.securitycenter_v1beta1.types.FieldMask]): Optional.
A field mask to specify the Finding fields to be listed in the response.
An empty field mask will list all fields.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.securitycenter_v1beta1.types.FieldMask`
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.securitycenter_v1beta1.types.Finding` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "list_findings" not in self._inner_api_calls:
self._inner_api_calls[
"list_findings"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_findings,
default_retry=self._method_configs["ListFindings"].retry,
default_timeout=self._method_configs["ListFindings"].timeout,
client_info=self._client_info,
)
request = securitycenter_service_pb2.ListFindingsRequest(
parent=parent,
filter=filter_,
order_by=order_by,
read_time=read_time,
field_mask=field_mask,
page_size=page_size,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_findings"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="findings",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator |
<SYSTEM_TASK:>
Convert a dict to protobuf.
<END_TASK>
<USER_TASK:>
Description:
def _dict_mapping_to_pb(mapping, proto_type):
"""
Convert a dict to protobuf.
Args:
mapping (dict): A dict that needs to be converted to protobuf.
proto_type (str): The type of the Protobuf.
Returns:
An instance of the specified protobuf.
""" |
converted_pb = getattr(trace_pb2, proto_type)()
ParseDict(mapping, converted_pb)
return converted_pb |
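The ``ParseDict`` pattern here is generic protobuf machinery, not specific to ``trace_pb2``. A minimal self-contained sketch using a message type that ships with ``protobuf`` itself (chosen only so the snippet runs without the trace protos):

from google.protobuf.json_format import ParseDict
from google.protobuf import descriptor_pb2

msg = descriptor_pb2.FileDescriptorProto()
ParseDict({"name": "example.proto"}, msg)
assert msg.name == "example.proto"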
<SYSTEM_TASK:>
Convert a span attribute dict to protobuf, including Links, Attributes,
<END_TASK>
<USER_TASK:>
Description:
def _span_attrs_to_pb(span_attr, proto_type):
"""
Convert a span attribute dict to protobuf, including Links, Attributes,
TimeEvents.
Args:
span_attr (dict): A dict that needs to be converted to protobuf.
proto_type (str): The type of the Protobuf.
Returns:
An instance of the specified protobuf.
""" |
attr_pb = getattr(trace_pb2.Span, proto_type)()
ParseDict(span_attr, attr_pb)
return attr_pb |
<SYSTEM_TASK:>
Creates a cluster instance from a protobuf.
<END_TASK>
<USER_TASK:>
Description:
def from_pb(cls, cluster_pb, instance):
"""Creates an cluster instance from a protobuf.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_cluster_from_pb]
:end-before: [END bigtable_cluster_from_pb]
:type cluster_pb: :class:`instance_pb2.Cluster`
:param cluster_pb: An instance protobuf object.
:type instance: :class:`google.cloud.bigtable.instance.Instance`
:param instance: The instance that owns the cluster.
:rtype: :class:`Cluster`
:returns: The Cluster parsed from the protobuf response.
:raises: :class:`ValueError <exceptions.ValueError>` if the cluster
name does not match
``projects/{project}/instances/{instance_id}/clusters/{cluster_id}``
or if the parsed instance ID does not match the instance ID
on the client.
or if the parsed project ID does not match the project ID
on the client.
""" |
match_cluster_name = _CLUSTER_NAME_RE.match(cluster_pb.name)
if match_cluster_name is None:
raise ValueError(
"Cluster protobuf name was not in the " "expected format.",
cluster_pb.name,
)
if match_cluster_name.group("instance") != instance.instance_id:
raise ValueError(
"Instance ID on cluster does not match the " "instance ID on the client"
)
if match_cluster_name.group("project") != instance._client.project:
raise ValueError(
"Project ID on cluster does not match the " "project ID on the client"
)
cluster_id = match_cluster_name.group("cluster_id")
result = cls(cluster_id, instance)
result._update_from_pb(cluster_pb)
return result |
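``_CLUSTER_NAME_RE`` is defined elsewhere in the module; the following is a hedged reconstruction, assumed only to match the documented name format and the group names used above:

import re

_CLUSTER_NAME_RE = re.compile(
    r"^projects/(?P<project>[^/]+)/"
    r"instances/(?P<instance>[^/]+)/"
    r"clusters/(?P<cluster_id>[^/]+)$"
)

m = _CLUSTER_NAME_RE.match("projects/my-proj/instances/my-inst/clusters/my-cluster")
assert m.group("instance") == "my-inst"
assert m.group("cluster_id") == "my-cluster"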
<SYSTEM_TASK:>
Cluster name used in requests.
<END_TASK>
<USER_TASK:>
Description:
def name(self):
"""Cluster name used in requests.
.. note::
This property will not change if ``_instance`` and ``cluster_id``
do not, but the return value is not cached.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_cluster_name]
:end-before: [END bigtable_cluster_name]
The cluster name is of the form
``"projects/{project}/instances/{instance}/clusters/{cluster_id}"``
:rtype: str
:returns: The cluster name.
""" |
return self._instance._client.instance_admin_client.cluster_path(
self._instance._client.project, self._instance.instance_id, self.cluster_id
) |
<SYSTEM_TASK:>
Reload the metadata for this cluster.
<END_TASK>
<USER_TASK:>
Description:
def reload(self):
"""Reload the metadata for this cluster.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_reload_cluster]
:end-before: [END bigtable_reload_cluster]
""" |
cluster_pb = self._instance._client.instance_admin_client.get_cluster(self.name)
# NOTE: _update_from_pb does not check that the project and
# cluster ID on the response match the request.
self._update_from_pb(cluster_pb) |
<SYSTEM_TASK:>
Check whether the cluster already exists.
<END_TASK>
<USER_TASK:>
Description:
def exists(self):
"""Check whether the cluster already exists.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_check_cluster_exists]
:end-before: [END bigtable_check_cluster_exists]
:rtype: bool
:returns: True if the cluster exists, else False.
""" |
client = self._instance._client
try:
client.instance_admin_client.get_cluster(name=self.name)
return True
# NOTE: There could be other exceptions that are returned to the user.
except NotFound:
return False |
<SYSTEM_TASK:>
Create this cluster.
<END_TASK>
<USER_TASK:>
Description:
def create(self):
"""Create this cluster.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_create_cluster]
:end-before: [END bigtable_create_cluster]
.. note::
Uses the ``project``, ``instance`` and ``cluster_id`` on the
current :class:`Cluster` in addition to the ``serve_nodes``.
To change them before creating, reset the values via
.. code:: python
cluster.serve_nodes = 8
cluster.cluster_id = 'i-changed-my-mind'
before calling :meth:`create`.
:rtype: :class:`~google.api_core.operation.Operation`
:returns: The long-running operation corresponding to the
create operation.
""" |
client = self._instance._client
cluster_pb = self._to_pb()
return client.instance_admin_client.create_cluster(
self._instance.name, self.cluster_id, cluster_pb
) |
<SYSTEM_TASK:>
Update this cluster.
<END_TASK>
<USER_TASK:>
Description:
def update(self):
"""Update this cluster.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_update_cluster]
:end-before: [END bigtable_update_cluster]
.. note::
Updates the ``serve_nodes``. If you'd like to
change them before updating, reset the values via
.. code:: python
cluster.serve_nodes = 8
before calling :meth:`update`.
The cluster's ``location`` (where its nodes and storage reside, of the
form ``projects/<project>/locations/<zone>``) is set at creation time
and cannot be changed, so it is not sent on update. ``serve_nodes`` is
the number of nodes allocated to this cluster; more nodes enable higher
throughput and more consistent performance.
:rtype: :class:`Operation`
:returns: The long-running operation corresponding to the
update operation.
""" |
client = self._instance._client
# We are passing `None` for second argument location.
# Location is set only at the time of creation of a cluster
# and can not be changed after cluster has been created.
return client.instance_admin_client.update_cluster(
self.name, self.serve_nodes, None
) |
<SYSTEM_TASK:>
Delete this cluster.
<END_TASK>
<USER_TASK:>
Description:
def delete(self):
"""Delete this cluster.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_delete_cluster]
:end-before: [END bigtable_delete_cluster]
Marks a cluster and all of its tables for permanent deletion in 7 days.
Immediately upon completion of the request:
* Billing will cease for all of the cluster's reserved resources.
* The cluster's ``delete_time`` field will be set 7 days in the future.
Soon afterward:
* All tables within the cluster will become unavailable.
At the cluster's ``delete_time``:
* The cluster and **all of its tables** will immediately and
irrevocably disappear from the API, and their data will be
permanently deleted.
""" |
client = self._instance._client
client.instance_admin_client.delete_cluster(self.name) |
<SYSTEM_TASK:>
Create a new cell from a Cell protobuf.
<END_TASK>
<USER_TASK:>
Description:
def from_pb(cls, cell_pb):
"""Create a new cell from a Cell protobuf.
:type cell_pb: :class:`._generated.data_pb2.Cell`
:param cell_pb: The protobuf to convert.
:rtype: :class:`Cell`
:returns: The cell corresponding to the protobuf.
""" |
if cell_pb.labels:
return cls(cell_pb.value, cell_pb.timestamp_micros, labels=cell_pb.labels)
else:
return cls(cell_pb.value, cell_pb.timestamp_micros) |
<SYSTEM_TASK:>
Convert the cells to a dictionary.
<END_TASK>
<USER_TASK:>
Description:
def to_dict(self):
"""Convert the cells to a dictionary.
This is intended to be used with HappyBase, so the column family and
column qualifiers are combined (with ``:``).
:rtype: dict
:returns: Dictionary containing all the data in the cells of this row.
""" |
result = {}
for column_family_id, columns in six.iteritems(self._cells):
for column_qual, cells in six.iteritems(columns):
key = _to_bytes(column_family_id) + b":" + _to_bytes(column_qual)
result[key] = cells
return result |
<SYSTEM_TASK:>
Get a single cell value stored on this instance.
<END_TASK>
<USER_TASK:>
Description:
def cell_value(self, column_family_id, column, index=0):
"""Get a single cell value stored on this instance.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_row_cell_value]
:end-before: [END bigtable_row_cell_value]
Args:
column_family_id (str): The ID of the column family. Must be of the
form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
column (bytes): The column within the column family where the cell
is located.
index (Optional[int]): The offset within the series of values. If
not specified, will return the first cell.
Returns:
~google.cloud.bigtable.row_data.Cell value: The cell value stored
in the specified column and specified index.
Raises:
KeyError: If ``column_family_id`` is not among the cells stored
in this row.
KeyError: If ``column`` is not among the cells stored in this row
for the given ``column_family_id``.
IndexError: If ``index`` cannot be found within the cells stored
in this row for the given ``column_family_id``, ``column``
pair.
""" |
cells = self.find_cells(column_family_id, column)
try:
cell = cells[index]
except (TypeError, IndexError):
num_cells = len(cells)
msg = _MISSING_INDEX.format(index, column, column_family_id, num_cells)
raise IndexError(msg)
return cell.value |
<SYSTEM_TASK:>
Consume the streamed responses until there are no more.
<END_TASK>
<USER_TASK:>
Description:
def consume_all(self, max_loops=None):
"""Consume the streamed responses until there are no more.
.. warning::
This method will be removed in future releases. Please use this
class as a generator instead.
:type max_loops: int
:param max_loops: (Optional) Maximum number of times to try to consume
an additional ``ReadRowsResponse``. You can use this
to avoid long wait times.
""" |
for row in self:
self.rows[row.row_key] = row |
<SYSTEM_TASK:>
Builds an updated request message based on the last scanned key
<END_TASK>
<USER_TASK:>
Description:
def build_updated_request(self):
""" Updates the given message request as per last scanned key
""" |
r_kwargs = {
"table_name": self.message.table_name,
"filter": self.message.filter,
}
if self.message.rows_limit != 0:
r_kwargs["rows_limit"] = max(
1, self.message.rows_limit - self.rows_read_so_far
)
# if neither RowSet.row_keys nor RowSet.row_ranges currently exist,
# add row_range that starts with last_scanned_key as start_key_open
# to request only rows that have not been returned yet
if not self.message.HasField("rows"):
row_range = data_v2_pb2.RowRange(start_key_open=self.last_scanned_key)
r_kwargs["rows"] = data_v2_pb2.RowSet(row_ranges=[row_range])
else:
row_keys = self._filter_rows_keys()
row_ranges = self._filter_row_ranges()
r_kwargs["rows"] = data_v2_pb2.RowSet(
row_keys=row_keys, row_ranges=row_ranges
)
return data_messages_v2_pb2.ReadRowsRequest(**r_kwargs) |
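The ``rows_limit`` arithmetic above, in isolation: if the original request asked for 100 rows and 40 were already delivered before the stream broke, the rebuilt request asks for the remaining 60, clamped to at least 1:

rows_limit, rows_read_so_far = 100, 40
remaining = max(1, rows_limit - rows_read_so_far)
assert remaining == 60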
<SYSTEM_TASK:>
Sends new traces to Stackdriver Trace or updates existing traces. If the ID
<END_TASK>
<USER_TASK:>
Description:
def patch_traces(
self,
project_id,
traces,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sends new traces to Stackdriver Trace or updates existing traces. If the ID
of a trace that you send matches that of an existing trace, any fields
in the existing trace and its spans are overwritten by the provided values,
and any new fields provided are merged with the existing trace data. If the
ID does not match, a new trace is created.
Example:
>>> from google.cloud import trace_v1
>>>
>>> client = trace_v1.TraceServiceClient()
>>>
>>> # TODO: Initialize `project_id`:
>>> project_id = ''
>>>
>>> # TODO: Initialize `traces`:
>>> traces = {}
>>>
>>> client.patch_traces(project_id, traces)
Args:
project_id (str): ID of the Cloud project where the trace data is stored.
traces (Union[dict, ~google.cloud.trace_v1.types.Traces]): The body of the message.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Traces`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "patch_traces" not in self._inner_api_calls:
self._inner_api_calls[
"patch_traces"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.patch_traces,
default_retry=self._method_configs["PatchTraces"].retry,
default_timeout=self._method_configs["PatchTraces"].timeout,
client_info=self._client_info,
)
request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("project_id", project_id)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["patch_traces"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Returns a list of traces that match the specified filter conditions.
<END_TASK>
<USER_TASK:>
Description:
def list_traces(
self,
project_id,
view=None,
page_size=None,
start_time=None,
end_time=None,
filter_=None,
order_by=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Returns a list of traces that match the specified filter conditions.
Example:
>>> from google.cloud import trace_v1
>>>
>>> client = trace_v1.TraceServiceClient()
>>>
>>> # TODO: Initialize `project_id`:
>>> project_id = ''
>>>
>>> # Iterate over all results
>>> for element in client.list_traces(project_id):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_traces(project_id).pages:
... for element in page:
... # process element
... pass
Args:
project_id (str): ID of the Cloud project where the trace data is stored.
view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is
``MINIMAL``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was
collected from the application.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Timestamp`
end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was
collected from the application.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.trace_v1.types.Timestamp`
filter_ (str): An optional filter against labels for the request.
By default, searches use prefix matching. To specify exact match,
prepend a plus symbol (``+``) to the search term. Multiple terms are
ANDed. Syntax:
- ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root
span starts with ``NAME_PREFIX``.
- ``+root:NAME`` or ``+NAME``: Return traces where any root span's name
is exactly ``NAME``.
- ``span:NAME_PREFIX``: Return traces where any span starts with
``NAME_PREFIX``.
- ``+span:NAME``: Return traces where any span's name is exactly
``NAME``.
- ``latency:DURATION``: Return traces whose overall latency is greater
than or equal to ``DURATION``. Accepted units are nanoseconds
(``ns``), milliseconds (``ms``), and seconds (``s``). Default is
``ms``. For example, ``latency:24ms`` returns traces whose overall
latency is greater than or equal to 24 milliseconds.
- ``label:LABEL_KEY``: Return all traces containing the specified label
key (exact match, case-sensitive) regardless of the key:value pair's
value (including empty values).
- ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the
specified label key (exact match, case-sensitive) whose value starts
with ``VALUE_PREFIX``. Both a key and a value must be specified.
- ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair
exactly matching the specified text. Both a key and a value must be
specified.
- ``method:VALUE``: Equivalent to ``/http/method:VALUE``.
- ``url:VALUE``: Equivalent to ``/http/url:VALUE``.
order_by (str): Field used to sort the returned traces. Optional. Can be one of the
following:
- ``trace_id``
- ``name`` (``name`` field of root span in the trace)
- ``duration`` (difference between ``end_time`` and ``start_time``
fields of the root span)
- ``start`` (``start_time`` field of the root span)
Descending order can be specified by appending ``desc`` to the sort
field (for example, ``name desc``).
Only one sort field is permitted.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "list_traces" not in self._inner_api_calls:
self._inner_api_calls[
"list_traces"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_traces,
default_retry=self._method_configs["ListTraces"].retry,
default_timeout=self._method_configs["ListTraces"].timeout,
client_info=self._client_info,
)
request = trace_pb2.ListTracesRequest(
project_id=project_id,
view=view,
page_size=page_size,
start_time=start_time,
end_time=end_time,
filter=filter_,
order_by=order_by,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("project_id", project_id)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_traces"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="traces",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator |
<SYSTEM_TASK:>
Gets the details of a specific Redis instance.
<END_TASK>
<USER_TASK:>
Description:
def get_instance(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets the details of a specific Redis instance.
Example:
>>> from google.cloud import redis_v1beta1
>>>
>>> client = redis_v1beta1.CloudRedisClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
>>>
>>> response = client.get_instance(name)
Args:
name (str): Required. Redis instance resource name using the form:
``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
where ``location_id`` refers to a GCP region
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.redis_v1beta1.types.Instance` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "get_instance" not in self._inner_api_calls:
self._inner_api_calls[
"get_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_instance,
default_retry=self._method_configs["GetInstance"].retry,
default_timeout=self._method_configs["GetInstance"].timeout,
client_info=self._client_info,
)
request = cloud_redis_pb2.GetInstanceRequest(name=name)
return self._inner_api_calls["get_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Updates the metadata and configuration of a specific Redis instance.
<END_TASK>
<USER_TASK:>
Description:
def update_instance(
self,
update_mask,
instance,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates the metadata and configuration of a specific Redis instance.
Completed longrunning.Operation will contain the new instance object
in the response field. The returned operation is automatically deleted
after a few hours, so there is no need to call DeleteOperation.
Example:
>>> from google.cloud import redis_v1beta1
>>>
>>> client = redis_v1beta1.CloudRedisClient()
>>>
>>> paths_element = 'display_name'
>>> paths_element_2 = 'memory_size_gb'
>>> paths = [paths_element, paths_element_2]
>>> update_mask = {'paths': paths}
>>> display_name = 'UpdatedDisplayName'
>>> memory_size_gb = 4
>>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb}
>>>
>>> response = client.update_instance(update_mask, instance)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied
in this field. The elements of the repeated paths field may only include
these fields from ``Instance``: \* ``display_name`` \* ``labels`` \*
``memory_size_gb`` \* ``redis_config``
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.redis_v1beta1.types.FieldMask`
instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are
updated.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.redis_v1beta1.types.Instance`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "update_instance" not in self._inner_api_calls:
self._inner_api_calls[
"update_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_instance,
default_retry=self._method_configs["UpdateInstance"].retry,
default_timeout=self._method_configs["UpdateInstance"].timeout,
client_info=self._client_info,
)
request = cloud_redis_pb2.UpdateInstanceRequest(
update_mask=update_mask, instance=instance
)
operation = self._inner_api_calls["update_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
cloud_redis_pb2.Instance,
metadata_type=any_pb2.Any,
) |
<SYSTEM_TASK:>
Deletes a specific Redis instance. Instance stops serving and data is
<END_TASK>
<USER_TASK:>
Description:
def delete_instance(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a specific Redis instance. Instance stops serving and data is
deleted.
Example:
>>> from google.cloud import redis_v1beta1
>>>
>>> client = redis_v1beta1.CloudRedisClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
>>>
>>> response = client.delete_instance(name)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): Required. Redis instance resource name using the form:
``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
where ``location_id`` refers to a GCP region
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "delete_instance" not in self._inner_api_calls:
self._inner_api_calls[
"delete_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_instance,
default_retry=self._method_configs["DeleteInstance"].retry,
default_timeout=self._method_configs["DeleteInstance"].timeout,
client_info=self._client_info,
)
request = cloud_redis_pb2.DeleteInstanceRequest(name=name)
operation = self._inner_api_calls["delete_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
empty_pb2.Empty,
metadata_type=any_pb2.Any,
) |
<SYSTEM_TASK:>
Return a fully-qualified region string.
<END_TASK>
<USER_TASK:>
Description:
def region_path(cls, project, region):
"""Return a fully-qualified region string.""" |
return google.api_core.path_template.expand(
"projects/{project}/regions/{region}", project=project, region=region
) |
<SYSTEM_TASK:>
Return a fully-qualified workflow_template string.
<END_TASK>
<USER_TASK:>
Description:
def workflow_template_path(cls, project, region, workflow_template):
"""Return a fully-qualified workflow_template string.""" |
return google.api_core.path_template.expand(
"projects/{project}/regions/{region}/workflowTemplates/{workflow_template}",
project=project,
region=region,
workflow_template=workflow_template,
) |
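For illustration, a brief usage sketch; the project, region, and template IDs below are placeholders. Since these path helpers are classmethods, they can be called without instantiating the client:
# Usage sketch; 'my-project', 'us-central1', and 'my-template' are placeholders.
from google.cloud import dataproc_v1beta2

name = dataproc_v1beta2.WorkflowTemplateServiceClient.workflow_template_path(
    'my-project', 'us-central1', 'my-template')
# name == 'projects/my-project/regions/us-central1/workflowTemplates/my-template'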
<SYSTEM_TASK:>
Creates new workflow template.
<END_TASK>
<USER_TASK:>
Description:
def create_workflow_template(
self,
parent,
template,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates new workflow template.
Example:
>>> from google.cloud import dataproc_v1beta2
>>>
>>> client = dataproc_v1beta2.WorkflowTemplateServiceClient()
>>>
>>> parent = client.region_path('[PROJECT]', '[REGION]')
>>>
>>> # TODO: Initialize `template`:
>>> template = {}
>>>
>>> response = client.create_workflow_template(parent, template)
Args:
parent (str): Required. The "resource name" of the region, as described in
https://cloud.google.com/apis/design/resource\_names of the form
``projects/{project_id}/regions/{region}``
template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_workflow_template" not in self._inner_api_calls:
self._inner_api_calls[
"create_workflow_template"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_workflow_template,
default_retry=self._method_configs["CreateWorkflowTemplate"].retry,
default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout,
client_info=self._client_info,
)
request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
parent=parent, template=template
)
return self._inner_api_calls["create_workflow_template"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Retrieves the latest workflow template.
<END_TASK>
<USER_TASK:>
Description:
def get_workflow_template(
self,
name,
version=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Retrieves the latest workflow template.
Can retrieve previously instantiated template by specifying optional
version parameter.
Example:
>>> from google.cloud import dataproc_v1beta2
>>>
>>> client = dataproc_v1beta2.WorkflowTemplateServiceClient()
>>>
>>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]')
>>>
>>> response = client.get_workflow_template(name)
Args:
name (str): Required. The "resource name" of the workflow template, as described in
https://cloud.google.com/apis/design/resource\_names of the form
``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
version (int): Optional. The version of workflow template to retrieve. Only previously
instantiated versions can be retrieved.
If unspecified, retrieves the current version.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "get_workflow_template" not in self._inner_api_calls:
self._inner_api_calls[
"get_workflow_template"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_workflow_template,
default_retry=self._method_configs["GetWorkflowTemplate"].retry,
default_timeout=self._method_configs["GetWorkflowTemplate"].timeout,
client_info=self._client_info,
)
request = workflow_templates_pb2.GetWorkflowTemplateRequest(
name=name, version=version
)
return self._inner_api_calls["get_workflow_template"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Parse a resource fragment into a schema field.
<END_TASK>
<USER_TASK:>
Description:
def _parse_schema_resource(info):
"""Parse a resource fragment into a schema field.
Args:
info (Mapping[str, dict]): should contain a "fields" key to be parsed
Returns:
Sequence[:class:`google.cloud.bigquery.schema.SchemaField`]:
a list of parsed fields, or an empty tuple if no "fields" key is found.
""" |
if "fields" not in info:
return ()
schema = []
for r_field in info["fields"]:
name = r_field["name"]
field_type = r_field["type"]
mode = r_field.get("mode", "NULLABLE")
description = r_field.get("description")
sub_fields = _parse_schema_resource(r_field)
schema.append(SchemaField(name, field_type, mode, description, sub_fields))
return schema |
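A minimal sketch of the recursion above; the field names are invented, and the nested RECORD shows how ``sub_fields`` is populated:
# Hypothetical resource fragment; nested "fields" recurse into sub_fields.
info = {
    "fields": [
        {"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
        {"name": "address", "type": "RECORD",
         "fields": [{"name": "city", "type": "STRING"}]},
    ]
}
schema = _parse_schema_resource(info)
# schema[0].mode == 'REQUIRED'; schema[1].fields[0].name == 'city'
# (a missing "mode" key defaults to 'NULLABLE').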
<SYSTEM_TASK:>
Return a ``SchemaField`` object deserialized from a dictionary.
<END_TASK>
<USER_TASK:>
Description:
def from_api_repr(cls, api_repr):
"""Return a ``SchemaField`` object deserialized from a dictionary.
Args:
api_repr (Mapping[str, str]): The serialized representation
of the SchemaField, such as what is output by
:meth:`to_api_repr`.
Returns:
google.cloud.bigquery.schema.SchemaField:
The ``SchemaField`` object.
""" |
# Handle optional properties with default values
mode = api_repr.get("mode", "NULLABLE")
description = api_repr.get("description")
fields = api_repr.get("fields", ())
return cls(
field_type=api_repr["type"].upper(),
fields=[cls.from_api_repr(f) for f in fields],
mode=mode.upper(),
description=description,
name=api_repr["name"],
) |
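For example, assuming ``SchemaField`` is imported from ``google.cloud.bigquery``, deserialization normalizes case and fills in defaults:
# Sketch: lowercase type/mode values are upper-cased on the way in.
api_repr = {"name": "age", "type": "integer", "mode": "nullable"}
field = SchemaField.from_api_repr(api_repr)
# field.field_type == 'INTEGER' and field.mode == 'NULLABLE';
# field.fields is empty because no "fields" key was present.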
<SYSTEM_TASK:>
Return a dictionary representing this schema field.
<END_TASK>
<USER_TASK:>
Description:
def to_api_repr(self):
"""Return a dictionary representing this schema field.
Returns:
dict: A dictionary representing the SchemaField in a serialized
form.
""" |
# Put together the basic representation. See http://bit.ly/2hOAT5u.
answer = {
"mode": self.mode.upper(),
"name": self.name,
"type": self.field_type.upper(),
"description": self.description,
}
# If this is a RECORD type, then sub-fields are also included,
# add this to the serialized representation.
if self.field_type.upper() == "RECORD":
answer["fields"] = [f.to_api_repr() for f in self.fields]
# Done; return the serialized dictionary.
return answer |
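A quick round-trip sketch of the serialization above; note that non-RECORD fields omit the ``fields`` key entirely:
field = SchemaField('age', 'INTEGER')
field.to_api_repr()
# -> {'mode': 'NULLABLE', 'name': 'age', 'type': 'INTEGER', 'description': None}
# RECORD fields additionally carry a 'fields' list of serialized sub-fields.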
<SYSTEM_TASK:>
Loads keys from database.
<END_TASK>
<USER_TASK:>
Description:
def load_keys(database, parameters):
"""Loads keys from database.""" |
keys = []
with database.snapshot() as snapshot:
results = snapshot.execute_sql(
'SELECT u.id FROM %s u' % parameters['table'])
for row in results:
keys.append(row[0])
return keys |
<SYSTEM_TASK:>
Does a single read operation.
<END_TASK>
<USER_TASK:>
Description:
def read(database, table, key):
"""Does a single read operation.""" |
with database.snapshot() as snapshot:
result = snapshot.execute_sql('SELECT u.* FROM %s u WHERE u.id="%s"' %
(table, key))
    for row in result:
        key = row[0]
        # Touch every column so the read exercises the full row; the
        # values themselves are intentionally discarded in this benchmark.
        for i in range(NUM_FIELD):
            field = row[i + 1]
<SYSTEM_TASK:>
Does a single operation and records latency.
<END_TASK>
<USER_TASK:>
Description:
def do_operation(database, keys, table, operation, latencies_ms):
"""Does a single operation and records latency.""" |
key = random.choice(keys)
start = timeit.default_timer()
if operation == 'read':
read(database, table, key)
elif operation == 'update':
update(database, table, key)
else:
raise ValueError('Unknown operation: %s' % operation)
end = timeit.default_timer()
latencies_ms[operation].append((end - start) * 1000) |
<SYSTEM_TASK:>
Runs workload against the database.
<END_TASK>
<USER_TASK:>
Description:
def run_workload(database, keys, parameters):
"""Runs workload against the database.""" |
total_weight = 0.0
weights = []
operations = []
latencies_ms = {}
for operation in OPERATIONS:
weight = float(parameters[operation])
if weight <= 0.0:
continue
total_weight += weight
op_code = operation.split('proportion')[0]
operations.append(op_code)
weights.append(total_weight)
latencies_ms[op_code] = []
threads = []
start = timeit.default_timer()
for i in range(int(parameters['num_worker'])):
thread = WorkloadThread(database, keys, parameters, total_weight,
weights, operations)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
end = timeit.default_timer()
for thread in threads:
thread_latencies_ms = thread.latencies_ms()
for key in latencies_ms.keys():
latencies_ms[key].extend(thread_latencies_ms[key])
aggregate_metrics(latencies_ms, (end - start) * 1000.0,
parameters['num_bucket']) |
<SYSTEM_TASK:>
Run a single thread of the workload.
<END_TASK>
<USER_TASK:>
Description:
def run(self):
"""Run a single thread of the workload.""" |
i = 0
operation_count = int(self._parameters['operationcount'])
while i < operation_count:
i += 1
weight = random.uniform(0, self._total_weight)
for j in range(len(self._weights)):
if weight <= self._weights[j]:
do_operation(self._database, self._keys,
self._parameters['table'],
self._operations[j], self._latencies_ms)
break |
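The inner loop above implements cumulative-weight sampling. Restated as a standalone sketch (the names here are illustrative, not part of the benchmark):
import random

def pick_by_cumulative_weight(items, cumulative_weights, total_weight):
    """Pick an item; cumulative_weights must be nondecreasing partial sums."""
    draw = random.uniform(0, total_weight)
    for item, bound in zip(items, cumulative_weights):
        if draw <= bound:
            return item
    return items[-1]  # guard against floating-point edge cases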
<SYSTEM_TASK:>
Add wrapped versions of the `api` member's methods to the class.
<END_TASK>
<USER_TASK:>
Description:
def add_methods(source_class, blacklist=()):
"""Add wrapped versions of the `api` member's methods to the class.
Any methods passed in `blacklist` are not added.
Additionally, any methods explicitly defined on the wrapped class are
not added.
""" |
def wrap(wrapped_fx):
"""Wrap a GAPIC method; preserve its name and docstring."""
# If this is a static or class method, then we need to *not*
# send self as the first argument.
#
# Similarly, for instance methods, we need to send self.api rather
# than self, since that is where the actual methods were declared.
instance_method = True
# If this is a bound method it's a classmethod.
self = getattr(wrapped_fx, "__self__", None)
if issubclass(type(self), type):
instance_method = False
# Okay, we have figured out what kind of method this is; send
# down the correct wrapper function.
if instance_method:
fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa
return functools.wraps(wrapped_fx)(fx)
fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa
return staticmethod(functools.wraps(wrapped_fx)(fx))
def actual_decorator(cls):
# Reflectively iterate over most of the methods on the source class
# (the GAPIC) and make wrapped versions available on this client.
for name in dir(source_class):
# Ignore all private and magic methods.
if name.startswith("_"):
continue
# Ignore anything on our blacklist.
if name in blacklist:
continue
# Retrieve the attribute, and ignore it if it is not callable.
attr = getattr(source_class, name)
if not callable(attr):
continue
# Add a wrapper method to this object.
fx = wrap(getattr(source_class, name))
setattr(cls, name, fx)
# Return the augmented class.
return cls
# Simply return the actual decorator; this is returned from this method
# and actually used to decorate the class.
return actual_decorator |
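A hedged usage sketch; ``GeneratedClient`` stands in for an auto-generated GAPIC class and is not a real name:
# Hypothetical: expose GAPIC methods on a hand-written wrapper class.
@add_methods(GeneratedClient, blacklist=('batch_annotate_images',))
class Client(object):
    def __init__(self, gapic_client):
        # Wrapped instance methods are invoked against `self.api`.
        self.api = gapic_client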
<SYSTEM_TASK:>
Creates and returns a new product resource.
<END_TASK>
<USER_TASK:>
Description:
def create_product(
self,
parent,
product,
product_id=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates and returns a new product resource.
Possible errors:
- Returns INVALID\_ARGUMENT if display\_name is missing or longer than
4096 characters.
- Returns INVALID\_ARGUMENT if description is longer than 4096
characters.
- Returns INVALID\_ARGUMENT if product\_category is missing or invalid.
Example:
>>> from google.cloud import vision_v1
>>>
>>> client = vision_v1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `product`:
>>> product = {}
>>>
>>> response = client.create_product(parent, product)
Args:
parent (str): The project in which the Product should be created.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
product (Union[dict, ~google.cloud.vision_v1.types.Product]): The product to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1.types.Product`
product_id (str): A user-supplied resource id for this Product. If set, the server will
attempt to use this value as the resource id. If it is already in use,
an error is returned with code ALREADY\_EXISTS. Must be at most 128
characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1.types.Product` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_product" not in self._inner_api_calls:
self._inner_api_calls[
"create_product"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_product,
default_retry=self._method_configs["CreateProduct"].retry,
default_timeout=self._method_configs["CreateProduct"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.CreateProductRequest(
parent=parent, product=product, product_id=product_id
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_product"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Makes changes to a Product resource. Only the ``display_name``,
<END_TASK>
<USER_TASK:>
Description:
def update_product(
self,
product,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Makes changes to a Product resource. Only the ``display_name``,
``description``, and ``labels`` fields can be updated right now.
If labels are updated, the change will not be reflected in queries until
the next index time.
Possible errors:
- Returns NOT\_FOUND if the Product does not exist.
- Returns INVALID\_ARGUMENT if display\_name is present in update\_mask
but is missing from the request or longer than 4096 characters.
- Returns INVALID\_ARGUMENT if description is present in update\_mask
but is longer than 4096 characters.
- Returns INVALID\_ARGUMENT if product\_category is present in
update\_mask.
Example:
>>> from google.cloud import vision_v1
>>>
>>> client = vision_v1.ProductSearchClient()
>>>
>>> # TODO: Initialize `product`:
>>> product = {}
>>>
>>> response = client.update_product(product)
Args:
product (Union[dict, ~google.cloud.vision_v1.types.Product]): The Product resource which replaces the one on the server.
product.name is immutable.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1.types.Product`
update_mask (Union[dict, ~google.cloud.vision_v1.types.FieldMask]): The ``FieldMask`` that specifies which fields to update. If update\_mask
isn't specified, all mutable fields are to be updated. Valid mask paths
include ``product_labels``, ``display_name``, and ``description``.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1.types.Product` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "update_product" not in self._inner_api_calls:
self._inner_api_calls[
"update_product"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_product,
default_retry=self._method_configs["UpdateProduct"].retry,
default_timeout=self._method_configs["UpdateProduct"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.UpdateProductRequest(
product=product, update_mask=update_mask
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("product.name", product.name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["update_product"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Copy ``entity`` into ``entity_pb``.
<END_TASK>
<USER_TASK:>
Description:
def _assign_entity_to_pb(entity_pb, entity):
"""Copy ``entity`` into ``entity_pb``.
Helper method for ``Batch.put``.
:type entity_pb: :class:`.entity_pb2.Entity`
:param entity_pb: The entity owned by a mutation.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity being updated within the batch / transaction.
""" |
bare_entity_pb = helpers.entity_to_protobuf(entity)
# Normalize the key for the request (the API rejects keys that carry a
# project ID in their partition ID) before copying it back.
key_pb = helpers._prepare_key_for_request(bare_entity_pb.key)
bare_entity_pb.key.CopyFrom(key_pb)
entity_pb.CopyFrom(bare_entity_pb)
<SYSTEM_TASK:>
Extract response data from a commit response.
<END_TASK>
<USER_TASK:>
Description:
def _parse_commit_response(commit_response_pb):
"""Extract response data from a commit response.
:type commit_response_pb: :class:`.datastore_pb2.CommitResponse`
:param commit_response_pb: The protobuf response from a commit request.
:rtype: tuple
:returns: The pair of the number of index updates and a list of
:class:`.entity_pb2.Key` for each incomplete key
that was completed in the commit.
""" |
mut_results = commit_response_pb.mutation_results
index_updates = commit_response_pb.index_updates
completed_keys = [
mut_result.key for mut_result in mut_results if mut_result.HasField("key")
] # Message field (Key)
return index_updates, completed_keys |
<SYSTEM_TASK:>
Adds a new mutation for an entity with a partial key.
<END_TASK>
<USER_TASK:>
Description:
def _add_partial_key_entity_pb(self):
"""Adds a new mutation for an entity with a partial key.
:rtype: :class:`.entity_pb2.Entity`
:returns: The newly created entity protobuf that will be
updated and sent with a commit.
""" |
new_mutation = _datastore_pb2.Mutation()
self._mutations.append(new_mutation)
return new_mutation.insert |
<SYSTEM_TASK:>
Adds a new mutation for an entity with a completed key.
<END_TASK>
<USER_TASK:>
Description:
def _add_complete_key_entity_pb(self):
"""Adds a new mutation for an entity with a completed key.
:rtype: :class:`.entity_pb2.Entity`
:returns: The newly created entity protobuf that will be
updated and sent with a commit.
""" |
# We use ``upsert`` for entities with completed keys, rather than
# ``insert`` or ``update``, in order not to create race conditions
# based on prior existence / removal of the entity.
new_mutation = _datastore_pb2.Mutation()
self._mutations.append(new_mutation)
return new_mutation.upsert |
<SYSTEM_TASK:>
Adds a new mutation for a key to be deleted.
<END_TASK>
<USER_TASK:>
Description:
def _add_delete_key_pb(self):
"""Adds a new mutation for a key to be deleted.
:rtype: :class:`.entity_pb2.Key`
:returns: The newly created key protobuf that will be
deleted when sent with a commit.
""" |
new_mutation = _datastore_pb2.Mutation()
self._mutations.append(new_mutation)
return new_mutation.delete |
<SYSTEM_TASK:>
Rolls back the current batch.
<END_TASK>
<USER_TASK:>
Description:
def rollback(self):
"""Rolls back the current batch.
Marks the batch as aborted (can't be used again).
Overridden by :class:`google.cloud.datastore.transaction.Transaction`.
:raises: :class:`~exceptions.ValueError` if the batch is not
in progress.
""" |
if self._status != self._IN_PROGRESS:
raise ValueError("Batch must be in progress to rollback()")
self._status = self._ABORTED |
<SYSTEM_TASK:>
Creates a new table in the specified instance.
<END_TASK>
<USER_TASK:>
Description:
def create_table(
self,
parent,
table_id,
table,
initial_splits=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a new table in the specified instance.
The table can be created with a full set of initial column families,
specified in the request.
Example:
>>> from google.cloud import bigtable_admin_v2
>>>
>>> client = bigtable_admin_v2.BigtableTableAdminClient()
>>>
>>> parent = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> # TODO: Initialize `table_id`:
>>> table_id = ''
>>>
>>> # TODO: Initialize `table`:
>>> table = {}
>>>
>>> response = client.create_table(parent, table_id, table)
Args:
parent (str): The unique name of the instance in which to create the table. Values are
of the form ``projects/<project>/instances/<instance>``.
table_id (str): The name by which the new table should be referred to within the parent
instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``.
table (Union[dict, ~google.cloud.bigtable_admin_v2.types.Table]): The Table to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.bigtable_admin_v2.types.Table`
initial_splits (list[Union[dict, ~google.cloud.bigtable_admin_v2.types.Split]]): The optional list of row keys that will be used to initially split the
table into several tablets (tablets are similar to HBase regions). Given
two split keys, ``s1`` and ``s2``, three tablets will be created,
spanning the key ranges: ``[, s1), [s1, s2), [s2, )``.
Example:
- Row keys := ``["a", "apple", "custom", "customer_1", "customer_2",``
``"other", "zz"]``
- initial\_split\_keys :=
``["apple", "customer_1", "customer_2", "other"]``
- Key assignment:
- Tablet 1 ``[, apple) => {"a"}.``
- Tablet 2 ``[apple, customer_1) => {"apple", "custom"}.``
- Tablet 3 ``[customer_1, customer_2) => {"customer_1"}.``
- Tablet 4 ``[customer_2, other) => {"customer_2"}.``
- Tablet 5 ``[other, ) => {"other", "zz"}.``
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.bigtable_admin_v2.types.Split`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.bigtable_admin_v2.types.Table` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_table" not in self._inner_api_calls:
self._inner_api_calls[
"create_table"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_table,
default_retry=self._method_configs["CreateTable"].retry,
default_timeout=self._method_configs["CreateTable"].timeout,
client_info=self._client_info,
)
request = bigtable_table_admin_pb2.CreateTableRequest(
parent=parent, table_id=table_id, table=table, initial_splits=initial_splits
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_table"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Creates a new table from the specified snapshot. The target table must
<END_TASK>
<USER_TASK:>
Description:
def create_table_from_snapshot(
self,
parent,
table_id,
source_snapshot,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a new table from the specified snapshot. The target table must
not exist. The snapshot and the table must be in the same instance.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
Example:
>>> from google.cloud import bigtable_admin_v2
>>>
>>> client = bigtable_admin_v2.BigtableTableAdminClient()
>>>
>>> parent = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> # TODO: Initialize `table_id`:
>>> table_id = ''
>>>
>>> # TODO: Initialize `source_snapshot`:
>>> source_snapshot = ''
>>>
>>> response = client.create_table_from_snapshot(parent, table_id, source_snapshot)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): The unique name of the instance in which to create the table. Values are
of the form ``projects/<project>/instances/<instance>``.
table_id (str): The name by which the new table should be referred to within the parent
instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``.
source_snapshot (str): The unique name of the snapshot from which to restore the table. The
snapshot and the table must be in the same instance. Values are of the
form
``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/<snapshot>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_table_from_snapshot" not in self._inner_api_calls:
self._inner_api_calls[
"create_table_from_snapshot"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_table_from_snapshot,
default_retry=self._method_configs["CreateTableFromSnapshot"].retry,
default_timeout=self._method_configs["CreateTableFromSnapshot"].timeout,
client_info=self._client_info,
)
request = bigtable_table_admin_pb2.CreateTableFromSnapshotRequest(
parent=parent, table_id=table_id, source_snapshot=source_snapshot
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["create_table_from_snapshot"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
table_pb2.Table,
metadata_type=bigtable_table_admin_pb2.CreateTableFromSnapshotMetadata,
) |
<SYSTEM_TASK:>
Creates a new snapshot in the specified cluster from the specified
<END_TASK>
<USER_TASK:>
Description:
def snapshot_table(
self,
name,
cluster,
snapshot_id,
description,
ttl=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a new snapshot in the specified cluster from the specified
source table. The cluster and the table must be in the same instance.
Note: This is a private alpha release of Cloud Bigtable snapshots. This
feature is not currently available to most Cloud Bigtable customers. This
feature might be changed in backward-incompatible ways and is not
recommended for production use. It is not subject to any SLA or deprecation
policy.
Example:
>>> from google.cloud import bigtable_admin_v2
>>>
>>> client = bigtable_admin_v2.BigtableTableAdminClient()
>>>
>>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]')
>>>
>>> # TODO: Initialize `cluster`:
>>> cluster = ''
>>>
>>> # TODO: Initialize `snapshot_id`:
>>> snapshot_id = ''
>>>
>>> # TODO: Initialize `description`:
>>> description = ''
>>>
>>> response = client.snapshot_table(name, cluster, snapshot_id, description)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): The unique name of the table to have the snapshot taken. Values are of
the form ``projects/<project>/instances/<instance>/tables/<table>``.
cluster (str): The name of the cluster where the snapshot will be created in. Values
are of the form
``projects/<project>/instances/<instance>/clusters/<cluster>``.
snapshot_id (str): The ID by which the new snapshot should be referred to within the parent
cluster, e.g., ``mysnapshot`` of the form:
``[_a-zA-Z0-9][-_.a-zA-Z0-9]*`` rather than
``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/mysnapshot``.
description (str): Description of the snapshot.
ttl (Union[dict, ~google.cloud.bigtable_admin_v2.types.Duration]): The amount of time that the new snapshot can stay active after it is
created. Once 'ttl' expires, the snapshot will get deleted. The maximum
amount of time a snapshot can stay active is 7 days. If 'ttl' is not
specified, the default value of 24 hours will be used.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.bigtable_admin_v2.types.Duration`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "snapshot_table" not in self._inner_api_calls:
self._inner_api_calls[
"snapshot_table"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.snapshot_table,
default_retry=self._method_configs["SnapshotTable"].retry,
default_timeout=self._method_configs["SnapshotTable"].timeout,
client_info=self._client_info,
)
request = bigtable_table_admin_pb2.SnapshotTableRequest(
name=name,
cluster=cluster,
snapshot_id=snapshot_id,
description=description,
ttl=ttl,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["snapshot_table"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
table_pb2.Snapshot,
metadata_type=bigtable_table_admin_pb2.SnapshotTableMetadata,
) |
<SYSTEM_TASK:>
Return a fully-qualified company string.
<END_TASK>
<USER_TASK:>
Description:
def company_path(cls, project, company):
"""Return a fully-qualified company string.""" |
return google.api_core.path_template.expand(
"projects/{project}/companies/{company}", project=project, company=company
) |
<SYSTEM_TASK:>
Creates a new company entity.
<END_TASK>
<USER_TASK:>
Description:
def create_company(
self,
parent,
company,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a new company entity.
Example:
>>> from google.cloud import talent_v4beta1
>>>
>>> client = talent_v4beta1.CompanyServiceClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `company`:
>>> company = {}
>>>
>>> response = client.create_company(parent, company)
Args:
parent (str): Required.
Resource name of the project under which the company is created.
The format is "projects/{project\_id}", for example,
"projects/api-test-project".
company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required.
The company to be created.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.Company`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.talent_v4beta1.types.Company` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "create_company" not in self._inner_api_calls:
self._inner_api_calls[
"create_company"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_company,
default_retry=self._method_configs["CreateCompany"].retry,
default_timeout=self._method_configs["CreateCompany"].timeout,
client_info=self._client_info,
)
request = company_service_pb2.CreateCompanyRequest(
parent=parent, company=company
)
return self._inner_api_calls["create_company"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Updates specified company.
<END_TASK>
<USER_TASK:>
Description:
def update_company(
self,
company,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates specified company.
Example:
>>> from google.cloud import talent_v4beta1
>>>
>>> client = talent_v4beta1.CompanyServiceClient()
>>>
>>> # TODO: Initialize `company`:
>>> company = {}
>>>
>>> response = client.update_company(company)
Args:
company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required.
The company resource to replace the current resource in the system.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.Company`
update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience.
If ``update_mask`` is provided, only the specified fields in ``company``
are updated. Otherwise all the fields are updated.
A field mask to specify the company fields to be updated. Only top level
fields of ``Company`` are supported.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.talent_v4beta1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.talent_v4beta1.types.Company` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "update_company" not in self._inner_api_calls:
self._inner_api_calls[
"update_company"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_company,
default_retry=self._method_configs["UpdateCompany"].retry,
default_timeout=self._method_configs["UpdateCompany"].timeout,
client_info=self._client_info,
)
request = company_service_pb2.UpdateCompanyRequest(
company=company, update_mask=update_mask
)
return self._inner_api_calls["update_company"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Return DNS quotas for the project associated with this client.
<END_TASK>
<USER_TASK:>
Description:
def quotas(self):
"""Return DNS quotas for the project associated with this client.
See
https://cloud.google.com/dns/api/v1/projects/get
:rtype: mapping
:returns: keys for the mapping correspond to those of the ``quota``
sub-mapping of the project resource.
""" |
path = "/projects/%s" % (self.project,)
resp = self._connection.api_request(method="GET", path=path)
return {
key: int(value) for key, value in resp["quota"].items() if key != "kind"
} |
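Usage sketch; the project ID is a placeholder, and the quota key shown is one example of what the mapping may contain:
from google.cloud import dns

client = dns.Client(project='my-project')
quotas = client.quotas()
# e.g. quotas.get('managedZones') -> maximum number of zones allowed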
<SYSTEM_TASK:>
List zones for the project associated with this client.
<END_TASK>
<USER_TASK:>
Description:
def list_zones(self, max_results=None, page_token=None):
"""List zones for the project associated with this client.
See
https://cloud.google.com/dns/api/v1/managedZones/list
:type max_results: int
:param max_results: maximum number of zones to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: Optional. If present, return the next batch of
zones, using the value, which must correspond to the
``nextPageToken`` value returned in the previous response.
Deprecated: use the ``pages`` property of the returned iterator
instead of manually passing the token.
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:returns: Iterator of :class:`~google.cloud.dns.zone.ManagedZone`
belonging to this project.
""" |
path = "/projects/%s/managedZones" % (self.project,)
return page_iterator.HTTPIterator(
client=self,
api_request=self._connection.api_request,
path=path,
item_to_value=_item_to_zone,
items_key="managedZones",
page_token=page_token,
max_results=max_results,
) |
<SYSTEM_TASK:>
Construct a zone bound to this client.
<END_TASK>
<USER_TASK:>
Description:
def zone(self, name, dns_name=None, description=None):
"""Construct a zone bound to this client.
:type name: str
:param name: Name of the zone.
:type dns_name: str
:param dns_name:
(Optional) DNS name of the zone. If not passed, then calls to
:meth:`zone.create` will fail.
:type description: str
:param description:
(Optional) the description for the zone. If not passed, defaults
to the value of 'dns_name'.
:rtype: :class:`google.cloud.dns.zone.ManagedZone`
:returns: a new ``ManagedZone`` instance.
""" |
return ManagedZone(name, dns_name, client=self, description=description) |
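Putting the two together, a sketch with placeholder names; note that ``zone()`` constructs a local object and makes no API call by itself:
zone = client.zone('example-zone', dns_name='example.com.')
# No request has been sent yet; creation is a separate call:
# zone.create()
existing = [z.name for z in client.list_zones()]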
<SYSTEM_TASK:>
Convert a Query instance to the corresponding protobuf.
<END_TASK>
<USER_TASK:>
Description:
def _pb_from_query(query):
"""Convert a Query instance to the corresponding protobuf.
:type query: :class:`Query`
:param query: The source query.
:rtype: :class:`.query_pb2.Query`
:returns: A protobuf that can be sent to the protobuf API. N.b. that
it does not contain "in-flight" fields for ongoing query
executions (cursors, offset, limit).
""" |
pb = query_pb2.Query()
for projection_name in query.projection:
pb.projection.add().property.name = projection_name
if query.kind:
pb.kind.add().name = query.kind
composite_filter = pb.filter.composite_filter
composite_filter.op = query_pb2.CompositeFilter.AND
if query.ancestor:
ancestor_pb = query.ancestor.to_protobuf()
# Filter on __key__ HAS_ANCESTOR == ancestor.
ancestor_filter = composite_filter.filters.add().property_filter
ancestor_filter.property.name = "__key__"
ancestor_filter.op = query_pb2.PropertyFilter.HAS_ANCESTOR
ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
for property_name, operator, value in query.filters:
pb_op_enum = query.OPERATORS.get(operator)
# Add the specific filter
property_filter = composite_filter.filters.add().property_filter
property_filter.property.name = property_name
property_filter.op = pb_op_enum
# Set the value to filter on based on the type.
if property_name == "__key__":
key_pb = value.to_protobuf()
property_filter.value.key_value.CopyFrom(key_pb)
else:
helpers._set_protobuf_value(property_filter.value, value)
if not composite_filter.filters:
pb.ClearField("filter")
for prop in query.order:
property_order = pb.order.add()
if prop.startswith("-"):
property_order.property.name = prop[1:]
property_order.direction = property_order.DESCENDING
else:
property_order.property.name = prop
property_order.direction = property_order.ASCENDING
for distinct_on_name in query.distinct_on:
pb.distinct_on.add().name = distinct_on_name
return pb |
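A sketch of the mapping, assuming a datastore ``client``; the kind, ancestor, and filter values are placeholders. A kind plus an ancestor and one property filter produce a composite ``AND`` filter with two entries:
query = client.query(kind='Person', ancestor=client.key('Family', 'smith'))
query.add_filter('age', '>=', 18)
pb = _pb_from_query(query)
# pb.filter.composite_filter.filters holds the __key__ HAS_ANCESTOR filter
# followed by the `age >= 18` property filter.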
<SYSTEM_TASK:>
Update the Kind of the Query.
<END_TASK>
<USER_TASK:>
Description:
def kind(self, value):
"""Update the Kind of the Query.
:type value: str
:param value: updated kind for the query.
.. note::
The protobuf specification allows for ``kind`` to be repeated,
but the current implementation returns an error if more than
one value is passed. If the back-end changes in the future to
allow multiple values, this method will be updated to allow passing
either a string or a sequence of strings.
""" |
if not isinstance(value, str):
raise TypeError("Kind must be a string")
self._kind = value |
<SYSTEM_TASK:>
Set the ancestor for the query
<END_TASK>
<USER_TASK:>
Description:
def ancestor(self, value):
"""Set the ancestor for the query
:type value: :class:`~google.cloud.datastore.key.Key`
:param value: the new ancestor key
""" |
if not isinstance(value, Key):
raise TypeError("Ancestor must be a Key")
self._ancestor = value |
<SYSTEM_TASK:>
Filter the query based on a property name, operator and a value.
<END_TASK>
<USER_TASK:>
Description:
def add_filter(self, property_name, operator, value):
"""Filter the query based on a property name, operator and a value.
Expressions take the form of::
.add_filter('<property>', '<operator>', <value>)
where property is a property stored on the entity in the datastore
and operator is one of ``OPERATORS``
(i.e., ``=``, ``<``, ``<=``, ``>``, ``>=``)::
>>> from google.cloud import datastore
>>> client = datastore.Client()
>>> query = client.query(kind='Person')
>>> query.add_filter('name', '=', 'James')
>>> query.add_filter('age', '>', 50)
:type property_name: str
:param property_name: A property name.
:type operator: str
:param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
:type value: :class:`int`, :class:`str`, :class:`bool`,
:class:`float`, :class:`NoneType`,
:class:`datetime.datetime`,
:class:`google.cloud.datastore.key.Key`
:param value: The value to filter on.
:raises: :class:`ValueError` if ``operator`` is not one of the
specified values, or if a filter names ``'__key__'`` but
passes an invalid value (a key is required).
""" |
if self.OPERATORS.get(operator) is None:
error_message = 'Invalid expression: "%s"' % (operator,)
choices_message = "Please use one of: =, <, <=, >, >=."
raise ValueError(error_message, choices_message)
if property_name == "__key__" and not isinstance(value, Key):
raise ValueError('Invalid key: "%s"' % value)
self._filters.append((property_name, operator, value)) |
<SYSTEM_TASK:>
Set the fields returned by the query.
<END_TASK>
<USER_TASK:>
Description:
def projection(self, projection):
"""Set the fields returned the query.
:type projection: str or sequence of strings
:param projection: Each value is a string giving the name of a
property to be included in the projection query.
""" |
if isinstance(projection, str):
projection = [projection]
self._projection[:] = projection |
<SYSTEM_TASK:>
Set the fields used to sort query results.
<END_TASK>
<USER_TASK:>
Description:
def order(self, value):
"""Set the fields used to sort query results.
Sort fields will be applied in the order specified.
:type value: str or sequence of strings
:param value: Each value is a string giving the name of the
property on which to sort, optionally preceded by a
hyphen (-) to specify descending order.
Omitting the hyphen implies ascending order.
""" |
if isinstance(value, str):
value = [value]
self._order[:] = value |
<SYSTEM_TASK:>
Set fields used to group query results.
<END_TASK>
<USER_TASK:>
Description:
def distinct_on(self, value):
"""Set fields used to group query results.
:type value: str or sequence of strings
:param value: Each value is a string giving the name of a
property to use to group results together.
""" |
if isinstance(value, str):
value = [value]
self._distinct_on[:] = value |
<SYSTEM_TASK:>
Execute the Query; return an iterator for the matching entities.
<END_TASK>
<USER_TASK:>
Description:
def fetch(
self,
limit=None,
offset=0,
start_cursor=None,
end_cursor=None,
client=None,
eventual=False,
):
"""Execute the Query; return an iterator for the matching entities.
For example::
>>> from google.cloud import datastore
>>> client = datastore.Client()
>>> query = client.query(kind='Person')
>>> query.add_filter('name', '=', 'Sally')
>>> list(query.fetch())
[<Entity object>, <Entity object>, ...]
>>> list(query.fetch(1))
[<Entity object>]
:type limit: int
:param limit: (Optional) limit passed through to the iterator.
:type offset: int
:param offset: (Optional) offset passed through to the iterator.
:type start_cursor: bytes
:param start_cursor: (Optional) cursor passed through to the iterator.
:type end_cursor: bytes
:param end_cursor: (Optional) cursor passed through to the iterator.
:type client: :class:`google.cloud.datastore.client.Client`
:param client: (Optional) client used to connect to datastore.
If not supplied, uses the query's value.
:type eventual: bool
:param eventual: (Optional) Defaults to strongly consistent (False).
Setting True will use eventual consistency,
but cannot be used inside a transaction or
will raise ValueError.
:rtype: :class:`Iterator`
:returns: The iterator for the query.
""" |
if client is None:
client = self._client
return Iterator(
self,
client,
limit=limit,
offset=offset,
start_cursor=start_cursor,
end_cursor=end_cursor,
eventual=eventual,
) |
<SYSTEM_TASK:>
Build a query protobuf.
<END_TASK>
<USER_TASK:>
Description:
def _build_protobuf(self):
"""Build a query protobuf.
Relies on the current state of the iterator.
:rtype:
:class:`.query_pb2.Query`
:returns: The query protobuf object for the current
state of the iterator.
""" |
pb = _pb_from_query(self._query)
start_cursor = self.next_page_token
if start_cursor is not None:
pb.start_cursor = base64.urlsafe_b64decode(start_cursor)
end_cursor = self._end_cursor
if end_cursor is not None:
pb.end_cursor = base64.urlsafe_b64decode(end_cursor)
if self.max_results is not None:
pb.limit.value = self.max_results - self.num_results
if start_cursor is None and self._offset is not None:
# NOTE: We don't need to add an offset to the request protobuf
# if we are using an existing cursor, because the offset
# is only relative to the start of the result set, not
# relative to each page (this method is called per-page)
pb.offset = self._offset
return pb |
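The cursor tokens are urlsafe-base64 strings: ``_process_query_results`` (below) encodes the raw protobuf cursor into ``next_page_token``, and this method decodes it back. A minimal round-trip sketch:
import base64

raw_cursor = b'\x00\x12opaque-bytes'
token = base64.urlsafe_b64encode(raw_cursor)   # stored as next_page_token
assert base64.urlsafe_b64decode(token) == raw_cursor  # restored here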
<SYSTEM_TASK:>
Process the response from a datastore query.
<END_TASK>
<USER_TASK:>
Description:
def _process_query_results(self, response_pb):
"""Process the response from a datastore query.
:type response_pb: :class:`.datastore_pb2.RunQueryResponse`
:param response_pb: The protobuf response from a ``runQuery`` request.
:rtype: iterable
:returns: The next page of entity results.
:raises ValueError: If ``more_results`` is an unexpected value.
""" |
self._skipped_results = response_pb.batch.skipped_results
if response_pb.batch.more_results == _NO_MORE_RESULTS:
self.next_page_token = None
else:
self.next_page_token = base64.urlsafe_b64encode(
response_pb.batch.end_cursor
)
self._end_cursor = None
if response_pb.batch.more_results == _NOT_FINISHED:
self._more_results = True
elif response_pb.batch.more_results in _FINISHED:
self._more_results = False
else:
raise ValueError("Unexpected value returned for `more_results`.")
return [result.entity for result in response_pb.batch.entity_results] |
<SYSTEM_TASK:>
Update one or more existing table rows.
<END_TASK>
<USER_TASK:>
Description:
def update(self, table, columns, values):
"""Update one or more existing table rows.
:type table: str
:param table: Name of the table to be modified.
:type columns: list of str
:param columns: Name of the table columns to be modified.
:type values: list of lists
:param values: Values to be modified.
""" |
self._mutations.append(Mutation(update=_make_write_pb(table, columns, values))) |
<SYSTEM_TASK:>
Delete one or more table rows.
<END_TASK>
<USER_TASK:>
Description:
def delete(self, table, keyset):
"""Delete one or more table rows.
:type table: str
:param table: Name of the table to be modified.
:type keyset: :class:`~google.cloud.spanner_v1.keyset.Keyset`
:param keyset: Keys/ranges identifying rows to delete.
""" |
delete = Mutation.Delete(table=table, key_set=keyset._to_pb())
self._mutations.append(Mutation(delete=delete)) |
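Usage sketch; the table name, columns, and keys are placeholders. Batches are typically used as context managers so the accumulated mutations commit on exit:
from google.cloud.spanner_v1 import KeySet

with database.batch() as batch:
    batch.update('citizens', ('email', 'age'), [('phred@example.com', 33)])
    batch.delete('citizens', KeySet(keys=[['bharney@example.com']]))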
<SYSTEM_TASK:>
Return if a dataset exists.
<END_TASK>
<USER_TASK:>
Description:
def dataset_exists(client, dataset_reference):
"""Return if a dataset exists.
Args:
client (google.cloud.bigquery.client.Client):
A client to connect to the BigQuery API.
dataset_reference (google.cloud.bigquery.dataset.DatasetReference):
A reference to the dataset to look for.
Returns:
bool: ``True`` if the dataset exists, ``False`` otherwise.
""" |
from google.cloud.exceptions import NotFound
try:
client.get_dataset(dataset_reference)
return True
except NotFound:
return False |
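Usage sketch; the dataset ID is a placeholder:
from google.cloud import bigquery

client = bigquery.Client()
ref = client.dataset('my_dataset')
if not dataset_exists(client, ref):
    client.create_dataset(bigquery.Dataset(ref))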
<SYSTEM_TASK:>
Return if a table exists.
<END_TASK>
<USER_TASK:>
Description:
def table_exists(client, table_reference):
"""Return if a table exists.
Args:
client (google.cloud.bigquery.client.Client):
A client to connect to the BigQuery API.
table_reference (google.cloud.bigquery.table.TableReference):
A reference to the table to look for.
Returns:
bool: ``True`` if the table exists, ``False`` otherwise.
""" |
from google.cloud.exceptions import NotFound
try:
client.get_table(table_reference)
return True
except NotFound:
return False |
<SYSTEM_TASK:>
Queries for entities.
<END_TASK>
<USER_TASK:>
Description:
def run_query(
self,
project_id,
partition_id,
read_options=None,
query=None,
gql_query=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Queries for entities.
Example:
>>> from google.cloud import datastore_v1
>>>
>>> client = datastore_v1.DatastoreClient()
>>>
>>> # TODO: Initialize `project_id`:
>>> project_id = ''
>>>
>>> # TODO: Initialize `partition_id`:
>>> partition_id = {}
>>>
>>> response = client.run_query(project_id, partition_id)
Args:
project_id (str): The ID of the project against which to make the request.
partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into subsets, identified by a partition ID.
Queries are scoped to a single partition.
This partition ID is normalized with the standard default context
partition ID.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.datastore_v1.types.PartitionId`
read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this query.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.datastore_v1.types.ReadOptions`
query (Union[dict, ~google.cloud.datastore_v1.types.Query]): The query to run.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.datastore_v1.types.Query`
gql_query (Union[dict, ~google.cloud.datastore_v1.types.GqlQuery]): The GQL query to run.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.datastore_v1.types.GqlQuery`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.datastore_v1.types.RunQueryResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "run_query" not in self._inner_api_calls:
self._inner_api_calls[
"run_query"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.run_query,
default_retry=self._method_configs["RunQuery"].retry,
default_timeout=self._method_configs["RunQuery"].timeout,
client_info=self._client_info,
)
# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query)
request = datastore_pb2.RunQueryRequest(
project_id=project_id,
partition_id=partition_id,
read_options=read_options,
query=query,
gql_query=gql_query,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("project_id", project_id)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["run_query"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Commits a transaction, optionally creating, deleting or modifying some
<END_TASK>
<USER_TASK:>
Description:
def commit(
self,
project_id,
mode,
mutations,
transaction=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Commits a transaction, optionally creating, deleting or modifying some
entities.
Example:
>>> from google.cloud import datastore_v1
>>> from google.cloud.datastore_v1 import enums
>>>
>>> client = datastore_v1.DatastoreClient()
>>>
>>> # TODO: Initialize `project_id`:
>>> project_id = ''
>>>
>>> # TODO: Initialize `mode`:
>>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED
>>>
>>> # TODO: Initialize `mutations`:
>>> mutations = []
>>>
>>> response = client.commit(project_id, mode, mutations)
Args:
project_id (str): The ID of the project against which to make the request.
mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``.
mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform.
When mode is ``TRANSACTIONAL``, mutations affecting a single entity are
applied in order. The following sequences of mutations affecting a
single entity are not permitted in a single ``Commit`` request:
- ``insert`` followed by ``insert``
- ``update`` followed by ``insert``
- ``upsert`` followed by ``insert``
- ``delete`` followed by ``update``
When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single
entity.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.datastore_v1.types.Mutation`
transaction (bytes): The identifier of the transaction associated with the commit. A
transaction identifier is returned by a call to
``Datastore.BeginTransaction``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.datastore_v1.types.CommitResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "commit" not in self._inner_api_calls:
self._inner_api_calls[
"commit"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.commit,
default_retry=self._method_configs["Commit"].retry,
default_timeout=self._method_configs["Commit"].timeout,
client_info=self._client_info,
)
# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(transaction=transaction)
request = datastore_pb2.CommitRequest(
project_id=project_id,
mode=mode,
mutations=mutations,
transaction=transaction,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("project_id", project_id)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["commit"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
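A hedged sketch of a non-transactional ``commit`` that upserts one entity; the project ID, kind, and key name below are placeholders.

from google.cloud import datastore_v1
from google.cloud.datastore_v1 import enums, types

client = datastore_v1.DatastoreClient()
project_id = 'my-project'  # placeholder project ID
key = types.Key(
    partition_id={'project_id': project_id},
    path=[{'kind': 'Task', 'name': 'sample-task'}],
)
mutation = types.Mutation(upsert=types.Entity(key=key))
# NON_TRANSACTIONAL mode needs no `transaction` identifier.
response = client.commit(
    project_id, enums.CommitRequest.Mode.NON_TRANSACTIONAL, [mutation]
)
print(response.index_updates)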
<SYSTEM_TASK:>
Return the labels for GAE app.
<END_TASK>
<USER_TASK:>
Description:
def get_gae_labels(self):
"""Return the labels for GAE app.
If the trace ID can be detected, it will be included as a label.
Currently, no other labels are included.
:rtype: dict
:returns: Labels for GAE app.
""" |
gae_labels = {}
trace_id = get_trace_id()
if trace_id is not None:
gae_labels[_TRACE_ID_LABEL] = trace_id
return gae_labels |
<SYSTEM_TASK:>
Return a fully-qualified service_account string.
<END_TASK>
<USER_TASK:>
Description:
def service_account_path(cls, project, service_account):
"""Return a fully-qualified service_account string.""" |
return google.api_core.path_template.expand(
"projects/{project}/serviceAccounts/{service_account}",
project=project,
service_account=service_account,
) |
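For illustration only, with placeholder arguments, the helper expands to:

from google.cloud import iam_credentials_v1

name = iam_credentials_v1.IAMCredentialsClient.service_account_path(
    'my-project', 'sa@my-project.iam.gserviceaccount.com'
)
# name == 'projects/my-project/serviceAccounts/sa@my-project.iam.gserviceaccount.com'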
<SYSTEM_TASK:>
Generates an OAuth 2.0 access token for a service account.
<END_TASK>
<USER_TASK:>
Description:
def generate_access_token(
self,
name,
scope,
delegates=None,
lifetime=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Generates an OAuth 2.0 access token for a service account.
Example:
>>> from google.cloud import iam_credentials_v1
>>>
>>> client = iam_credentials_v1.IAMCredentialsClient()
>>>
>>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
>>>
>>> # TODO: Initialize `scope`:
>>> scope = []
>>>
>>> response = client.generate_access_token(name, scope)
Args:
name (str): The resource name of the service account for which the credentials are
requested, in the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
scope (list[str]): Code to identify the scopes to be included in the OAuth 2.0 access token.
See https://developers.google.com/identity/protocols/googlescopes for more
information.
At least one value required.
delegates (list[str]): The sequence of service accounts in a delegation chain. Each service
account must be granted the ``roles/iam.serviceAccountTokenCreator``
role on its next service account in the chain. The last service account
in the chain must be granted the
``roles/iam.serviceAccountTokenCreator`` role on the service account
that is specified in the ``name`` field of the request.
The delegates must have the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``
lifetime (Union[dict, ~google.cloud.iam_credentials_v1.types.Duration]): The desired lifetime duration of the access token in seconds.
Must be set to a value less than or equal to 3600 (1 hour). If a value is
not specified, the token's lifetime will be set to a default value of one
hour.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iam_credentials_v1.types.Duration`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iam_credentials_v1.types.GenerateAccessTokenResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "generate_access_token" not in self._inner_api_calls:
self._inner_api_calls[
"generate_access_token"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.generate_access_token,
default_retry=self._method_configs["GenerateAccessToken"].retry,
default_timeout=self._method_configs["GenerateAccessToken"].timeout,
client_info=self._client_info,
)
request = common_pb2.GenerateAccessTokenRequest(
name=name, scope=scope, delegates=delegates, lifetime=lifetime
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["generate_access_token"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
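A minimal sketch, assuming the caller already holds ``roles/iam.serviceAccountTokenCreator`` on the target account; the account email, scope, and lifetime are placeholders.

from google.cloud import iam_credentials_v1

client = iam_credentials_v1.IAMCredentialsClient()
name = client.service_account_path('-', 'sa@my-project.iam.gserviceaccount.com')
scope = ['https://www.googleapis.com/auth/cloud-platform']
# Request a 30-minute token instead of the default one-hour lifetime.
response = client.generate_access_token(name, scope, lifetime={'seconds': 1800})
print(response.access_token, response.expire_time)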
<SYSTEM_TASK:>
Generates an OpenID Connect ID token for a service account.
<END_TASK>
<USER_TASK:>
Description:
def generate_id_token(
self,
name,
audience,
delegates=None,
include_email=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Generates an OpenID Connect ID token for a service account.
Example:
>>> from google.cloud import iam_credentials_v1
>>>
>>> client = iam_credentials_v1.IAMCredentialsClient()
>>>
>>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
>>>
>>> # TODO: Initialize `audience`:
>>> audience = ''
>>>
>>> response = client.generate_id_token(name, audience)
Args:
name (str): The resource name of the service account for which the credentials are
requested, in the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
audience (str): The audience for the token, such as the API or account that this token
grants access to.
delegates (list[str]): The sequence of service accounts in a delegation chain. Each service
account must be granted the ``roles/iam.serviceAccountTokenCreator``
role on its next service account in the chain. The last service account
in the chain must be granted the
``roles/iam.serviceAccountTokenCreator`` role on the service account
that is specified in the ``name`` field of the request.
The delegates must have the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``
include_email (bool): Include the service account email in the token. If set to ``true``, the
token will contain ``email`` and ``email_verified`` claims.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iam_credentials_v1.types.GenerateIdTokenResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "generate_id_token" not in self._inner_api_calls:
self._inner_api_calls[
"generate_id_token"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.generate_id_token,
default_retry=self._method_configs["GenerateIdToken"].retry,
default_timeout=self._method_configs["GenerateIdToken"].timeout,
client_info=self._client_info,
)
request = common_pb2.GenerateIdTokenRequest(
name=name,
audience=audience,
delegates=delegates,
include_email=include_email,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["generate_id_token"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Signs a blob using a service account's system-managed private key.
<END_TASK>
<USER_TASK:>
Description:
def sign_blob(
self,
name,
payload,
delegates=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Signs a blob using a service account's system-managed private key.
Example:
>>> from google.cloud import iam_credentials_v1
>>>
>>> client = iam_credentials_v1.IAMCredentialsClient()
>>>
>>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
>>>
>>> # TODO: Initialize `payload`:
>>> payload = b''
>>>
>>> response = client.sign_blob(name, payload)
Args:
name (str): The resource name of the service account for which the credentials are
requested, in the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
payload (bytes): The bytes to sign.
delegates (list[str]): The sequence of service accounts in a delegation chain. Each service
account must be granted the ``roles/iam.serviceAccountTokenCreator``
role on its next service account in the chain. The last service account
in the chain must be granted the
``roles/iam.serviceAccountTokenCreator`` role on the service account
that is specified in the ``name`` field of the request.
The delegates must have the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iam_credentials_v1.types.SignBlobResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "sign_blob" not in self._inner_api_calls:
self._inner_api_calls[
"sign_blob"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.sign_blob,
default_retry=self._method_configs["SignBlob"].retry,
default_timeout=self._method_configs["SignBlob"].timeout,
client_info=self._client_info,
)
request = common_pb2.SignBlobRequest(
name=name, payload=payload, delegates=delegates
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["sign_blob"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
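A short sketch of signing raw bytes; the service-account email is a placeholder, and the response carries the key ID alongside the signature.

from google.cloud import iam_credentials_v1

client = iam_credentials_v1.IAMCredentialsClient()
name = client.service_account_path('-', 'sa@my-project.iam.gserviceaccount.com')
response = client.sign_blob(name, b'data to sign')
# `key_id` identifies the system-managed key that produced the signature.
print(response.key_id, response.signed_blob)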
<SYSTEM_TASK:>
Exchange a JWT signed by a third-party identity provider for an OAuth 2.0
<END_TASK>
<USER_TASK:>
Description:
def generate_identity_binding_access_token(
self,
name,
scope,
jwt,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Exchange a JWT signed by a third-party identity provider for an OAuth 2.0
access token.
Example:
>>> from google.cloud import iam_credentials_v1
>>>
>>> client = iam_credentials_v1.IAMCredentialsClient()
>>>
>>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
>>>
>>> # TODO: Initialize `scope`:
>>> scope = []
>>>
>>> # TODO: Initialize `jwt`:
>>> jwt = ''
>>>
>>> response = client.generate_identity_binding_access_token(name, scope, jwt)
Args:
name (str): The resource name of the service account for which the credentials are
requested, in the following format:
``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
scope (list[str]): Code to identify the scopes to be included in the OAuth 2.0 access token.
See https://developers.google.com/identity/protocols/googlescopes for more
information.
At least one value required.
jwt (str): Required. Input token. Must be in JWT format according to RFC 7523
(https://tools.ietf.org/html/rfc7523) and must have a 'kid' field in the
header. Supported signing algorithms: RS256 (RS512, ES256, ES512 coming
soon). Mandatory payload fields (along the lines of RFC 7523, section
3):
- iss: issuer of the token. Must provide a discovery document at
$iss/.well-known/openid-configuration . The document needs to be
formatted according to section 4.2 of the OpenID Connect Discovery
1.0 specification.
- iat: Issue time in seconds since epoch. Must be in the past.
- exp: Expiration time in seconds since epoch. Must be less than 48
hours after iat. We recommend creating tokens that last less than
6 hours to improve security, unless business reasons mandate longer
expiration times. Shorter token lifetimes are generally more secure,
since tokens that have been exfiltrated by attackers can be used for
a shorter time. You can configure the maximum lifetime of the
incoming token in the mapper configuration. The resulting
Google token will expire within an hour or at "exp", whichever is
earlier.
- sub: JWT subject, identity asserted in the JWT.
- aud: Configured in the mapper policy. By default the service account
email.
Claims from the incoming token can be transferred into the output token
according to the mapper configuration. The outgoing claim size is
limited: outgoing claims must be less than 4 kB when serialized as JSON
without whitespace.
Example header: { "alg": "RS256", "kid":
"92a4265e14ab04d4d228a48d10d4ca31610936f8" } Example payload: { "iss":
"https://accounts.google.com", "iat": 1517963104, "exp": 1517966704,
"aud": "https://iamcredentials.googleapis.com/", "sub":
"113475438248934895348", "my\_claims": { "additional\_claim": "value" }
}
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iam_credentials_v1.types.GenerateIdentityBindingAccessTokenResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "generate_identity_binding_access_token" not in self._inner_api_calls:
self._inner_api_calls[
"generate_identity_binding_access_token"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.generate_identity_binding_access_token,
default_retry=self._method_configs[
"GenerateIdentityBindingAccessToken"
].retry,
default_timeout=self._method_configs[
"GenerateIdentityBindingAccessToken"
].timeout,
client_info=self._client_info,
)
request = common_pb2.GenerateIdentityBindingAccessTokenRequest(
name=name, scope=scope, jwt=jwt
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["generate_identity_binding_access_token"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
<SYSTEM_TASK:>
Return a fully-qualified entry string.
<END_TASK>
<USER_TASK:>
Description:
def entry_path(cls, project, location, entry_group, entry):
"""Return a fully-qualified entry string.""" |
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}",
project=project,
location=location,
entry_group=entry_group,
entry=entry,
) |
<SYSTEM_TASK:>
Get an entry by target resource name. This method allows clients to use
<END_TASK>
<USER_TASK:>
Description:
def lookup_entry(
self,
linked_resource=None,
sql_resource=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Get an entry by target resource name. This method allows clients to use
the resource name from the source Google Cloud Platform service to get the
Cloud Data Catalog Entry.
Example:
>>> from google.cloud import datacatalog_v1beta1
>>>
>>> client = datacatalog_v1beta1.DataCatalogClient()
>>>
>>> response = client.lookup_entry()
Args:
linked_resource (str): The full name of the Google Cloud Platform resource the Data Catalog
entry represents. See:
https://cloud.google.com/apis/design/resource\_names#full\_resource\_name
Full names are case-sensitive.
Examples:
"//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId".
"//pubsub.googleapis.com/projects/projectId/topics/topicId"
sql_resource (str): The SQL name of the entry. SQL names are case-sensitive.
Examples:
1. cloud\_pubsub.project\_id.topic\_id
2. bigquery.project\_id.dataset\_id.table\_id
3. datacatalog.project\_id.location\_id.entry\_group\_id.entry\_id
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.datacatalog_v1beta1.types.Entry` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
""" |
# Wrap the transport method to add retry and timeout logic.
if "lookup_entry" not in self._inner_api_calls:
self._inner_api_calls[
"lookup_entry"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.lookup_entry,
default_retry=self._method_configs["LookupEntry"].retry,
default_timeout=self._method_configs["LookupEntry"].timeout,
client_info=self._client_info,
)
# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(
linked_resource=linked_resource, sql_resource=sql_resource
)
request = datacatalog_pb2.LookupEntryRequest(
linked_resource=linked_resource, sql_resource=sql_resource
)
return self._inner_api_calls["lookup_entry"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
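A hedged sketch of the one-of constraint: supply either ``linked_resource`` or ``sql_resource``, never both. The BigQuery table below is a placeholder.

from google.cloud import datacatalog_v1beta1

client = datacatalog_v1beta1.DataCatalogClient()
entry = client.lookup_entry(
    linked_resource='//bigquery.googleapis.com/projects/my-project'
    '/datasets/my_dataset/tables/my_table'
)
# Passing both keyword arguments would raise ValueError via check_oneof.
print(entry.name)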
<SYSTEM_TASK:>
Convert a path tuple into a full path string.
<END_TASK>
<USER_TASK:>
Description:
def _get_document_path(client, path):
"""Convert a path tuple into a full path string.
Of the form:
``projects/{project_id}/databases/{database_id}/...
documents/{document_path}``
Args:
client (~.firestore_v1beta1.client.Client): The client that holds
configuration details and a GAPIC client object.
path (Tuple[str, ...]): The components in a document path.
Returns:
str: The fully-qualified document path.
""" |
parts = (client._database_string, "documents") + path
return _helpers.DOCUMENT_PATH_DELIMITER.join(parts) |
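As an illustration with a placeholder database string, this performs the same join as the helper:

from google.cloud.firestore_v1beta1 import _helpers

database_string = 'projects/my-project/databases/(default)'
parts = (database_string, 'documents') + ('users', 'alice')
path = _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
# path == 'projects/my-project/databases/(default)/documents/users/alice'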
<SYSTEM_TASK:>
Consume a gRPC stream that should contain a single response.
<END_TASK>
<USER_TASK:>
Description:
def _consume_single_get(response_iterator):
"""Consume a gRPC stream that should contain a single response.
The stream will correspond to a ``BatchGetDocuments`` request made
for a single document.
Args:
response_iterator (~google.cloud.exceptions.GrpcRendezvous): A
streaming iterator returned from a ``BatchGetDocuments``
request.
Returns:
~google.cloud.proto.firestore.v1beta1.\
firestore_pb2.BatchGetDocumentsResponse: The single "get"
response in the batch.
Raises:
ValueError: If anything other than exactly one response is returned.
""" |
# Calling ``list()`` consumes the entire iterator.
all_responses = list(response_iterator)
if len(all_responses) != 1:
raise ValueError(
"Unexpected response from `BatchGetDocumentsResponse`",
all_responses,
"Expected only one result",
)
return all_responses[0] |
<SYSTEM_TASK:>
Create and cache the full path for this document.
<END_TASK>
<USER_TASK:>
Description:
def _document_path(self):
"""Create and cache the full path for this document.
Of the form:
``projects/{project_id}/databases/{database_id}/...
documents/{document_path}``
Returns:
str: The full document path.
Raises:
ValueError: If the current document reference has no ``client``.
""" |
if self._document_path_internal is None:
if self._client is None:
raise ValueError("A document reference requires a `client`.")
self._document_path_internal = _get_document_path(self._client, self._path)
return self._document_path_internal |
<SYSTEM_TASK:>
Create a sub-collection underneath the current document.
<END_TASK>
<USER_TASK:>
Description:
def collection(self, collection_id):
"""Create a sub-collection underneath the current document.
Args:
collection_id (str): The sub-collection identifier (sometimes
referred to as the "kind").
Returns:
~.firestore_v1beta1.collection.CollectionReference: The
child collection.
""" |
child_path = self._path + (collection_id,)
return self._client.collection(*child_path) |
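A brief sketch; the document path and sub-collection name are placeholders.

from google.cloud import firestore_v1beta1

client = firestore_v1beta1.Client()
document = client.document('users', 'alice')
# 'messages' becomes a sub-collection at 'users/alice/messages'.
messages = document.collection('messages')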
<SYSTEM_TASK:>
Create the current document in the Firestore database.
<END_TASK>
<USER_TASK:>
Description:
def create(self, document_data):
"""Create the current document in the Firestore database.
Args:
document_data (dict): Property names and values to use for
creating a document.
Returns:
google.cloud.firestore_v1beta1.types.WriteResult: The
write result corresponding to the committed document. A write
result contains an ``update_time`` field.
Raises:
~google.cloud.exceptions.Conflict: If the document already exists.
""" |
batch = self._client.batch()
batch.create(self, document_data)
write_results = batch.commit()
return _first_write_result(write_results) |
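A minimal sketch, assuming the document does not yet exist (otherwise ``Conflict`` is raised); the collection and field names are placeholders.

from google.cloud import firestore_v1beta1

client = firestore_v1beta1.Client()
document = client.document('users', 'alice')
# Raises Conflict if 'users/alice' already exists.
write_result = document.create({'name': 'Alice', 'active': True})
print(write_result.update_time)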
<SYSTEM_TASK:>
Replace the current document in the Firestore database.
<END_TASK>
<USER_TASK:>
Description:
def set(self, document_data, merge=False):
"""Replace the current document in the Firestore database.
A write ``option`` can be specified to indicate preconditions of
the "set" operation. If no ``option`` is specified and this document
doesn't exist yet, this method will create it.
Overwrites all content for the document with the fields in
``document_data``. This method performs almost the same functionality
as :meth:`create`. The only difference is that this method doesn't
make any requirements on the existence of the document (unless
``option`` is used), whereas :meth:`create` will fail if the
document already exists.
Args:
document_data (dict): Property names and values to use for
replacing a document.
merge (Optional[bool] or Optional[List<apispec>]):
If True, apply merging instead of overwriting the state
of the document.
Returns:
google.cloud.firestore_v1beta1.types.WriteResult: The
write result corresponding to the committed document. A write
result contains an ``update_time`` field.
""" |
batch = self._client.batch()
batch.set(self, document_data, merge=merge)
write_results = batch.commit()
return _first_write_result(write_results) |
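A short sketch contrasting overwrite and merge semantics; the data is illustrative.

from google.cloud import firestore_v1beta1

client = firestore_v1beta1.Client()
document = client.document('users', 'alice')
# Without merge, the whole document is replaced (or created if absent).
document.set({'name': 'Alice'})
# With merge=True, only the given fields change; other fields survive.
document.set({'active': False}, merge=True)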
<SYSTEM_TASK:>
Update an existing document in the Firestore database.
<END_TASK>
<USER_TASK:>
Description:
def update(self, field_updates, option=None):
"""Update an existing document in the Firestore database.
By default, this method verifies that the document exists on the
server before making updates. A write ``option`` can be specified to
override these preconditions.
Each key in ``field_updates`` can either be a field name or a
**field path** (For more information on **field paths**, see
:meth:`~.firestore_v1beta1.client.Client.field_path`.) To
illustrate this, consider a document with
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
},
'other': True,
}
stored on the server. If the field name is used in the update:
.. code-block:: python
>>> field_updates = {
... 'foo': {
... 'quux': 800,
... },
... }
>>> document.update(field_updates)
then all of ``foo`` will be overwritten on the server and the new
value will be
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'quux': 800,
},
'other': True,
}
On the other hand, if a ``.``-delimited **field path** is used in the
update:
.. code-block:: python
>>> field_updates = {
... 'foo.quux': 800,
... }
>>> document.update(field_updates)
then only ``foo.quux`` will be updated on the server and the
field ``foo.bar`` will remain intact:
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
'quux': 800,
},
'other': True,
}
.. warning::
A **field path** can only be used as a top-level key in
``field_updates``.
To delete / remove a field from an existing document, use the
:attr:`~.firestore_v1beta1.transforms.DELETE_FIELD` sentinel. So
with the example above, sending
.. code-block:: python
>>> field_updates = {
... 'other': firestore.DELETE_FIELD,
... }
>>> document.update(field_updates)
would update the value on the server to:
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
},
}
To set a field to the current time on the server when the
update is received, use the
:attr:`~.firestore_v1beta1.transforms.SERVER_TIMESTAMP` sentinel.
Sending
.. code-block:: python
>>> field_updates = {
... 'foo.now': firestore.SERVER_TIMESTAMP,
... }
>>> document.update(field_updates)
would update the value on the server to:
.. code-block:: python
>>> snapshot = document.get()
>>> snapshot.to_dict()
{
'foo': {
'bar': 'baz',
'now': datetime.datetime(2012, ...),
},
'other': True,
}
Args:
field_updates (dict): Field names or paths to update and values
to update with.
option (Optional[~.firestore_v1beta1.client.WriteOption]): A
write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
google.cloud.firestore_v1beta1.types.WriteResult: The
write result corresponding to the updated document. A write
result contains an ``update_time`` field.
Raises:
~google.cloud.exceptions.NotFound: If the document does not exist.
""" |
batch = self._client.batch()
batch.update(self, field_updates, option=option)
write_results = batch.commit()
return _first_write_result(write_results) |
<SYSTEM_TASK:>
Delete the current document in the Firestore database.
<END_TASK>
<USER_TASK:>
Description:
def delete(self, option=None):
"""Delete the current document in the Firestore database.
Args:
option (Optional[~.firestore_v1beta1.client.WriteOption]): A
write option to make assertions / preconditions on the server
state of the document before applying changes.
Returns:
google.protobuf.timestamp_pb2.Timestamp: The time that the delete
request was received by the server. If the document did not exist
when the delete was sent (i.e. nothing was deleted), this method
will still succeed and will still return the time that the
request was received by the server.
""" |
write_pb = _helpers.pb_for_delete(self._document_path, option)
commit_response = self._client._firestore_api.commit(
self._client._database_string,
[write_pb],
transaction=None,
metadata=self._client._rpc_metadata,
)
return commit_response.commit_time |
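A minimal sketch; note the call succeeds even if the document is already gone.

from google.cloud import firestore_v1beta1

client = firestore_v1beta1.Client()
document = client.document('users', 'alice')
# The returned timestamp is when the server received the request.
timestamp = document.delete()
print(timestamp)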
<SYSTEM_TASK:>
Retrieve a snapshot of the current document.
<END_TASK>
<USER_TASK:>
Description:
def get(self, field_paths=None, transaction=None):
"""Retrieve a snapshot of the current document.
See :meth:`~.firestore_v1beta1.client.Client.field_path` for
more information on **field paths**.
If a ``transaction`` is used and it already has write operations
added, this method cannot be used (i.e. read-after-write is not
allowed).
Args:
field_paths (Optional[Iterable[str, ...]]): An iterable of field
paths (``.``-delimited list of field names) to use as a
projection of document fields in the returned results. If
no value is provided, all fields will be returned.
transaction (Optional[~.firestore_v1beta1.transaction.\
Transaction]): An existing transaction that this reference
will be retrieved in.
Returns:
~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of
the current document. If the document does not exist at
the time of `snapshot`, the snapshot `reference`, `data`,
`update_time`, and `create_time` attributes will all be
`None` and `exists` will be `False`.
""" |
if isinstance(field_paths, six.string_types):
raise ValueError("'field_paths' must be a sequence of paths, not a string.")
if field_paths is not None:
mask = common_pb2.DocumentMask(field_paths=sorted(field_paths))
else:
mask = None
firestore_api = self._client._firestore_api
try:
document_pb = firestore_api.get_document(
self._document_path,
mask=mask,
transaction=_helpers.get_transaction_id(transaction),
metadata=self._client._rpc_metadata,
)
except exceptions.NotFound:
data = None
exists = False
create_time = None
update_time = None
else:
data = _helpers.decode_dict(document_pb.fields, self._client)
exists = True
create_time = document_pb.create_time
update_time = document_pb.update_time
return DocumentSnapshot(
reference=self,
data=data,
exists=exists,
read_time=None, # No server read_time available
create_time=create_time,
update_time=update_time,
) |
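A hedged sketch of a projected read; passing a bare string for ``field_paths`` raises ValueError, so a list is used.

from google.cloud import firestore_v1beta1

client = firestore_v1beta1.Client()
document = client.document('users', 'alice')
snapshot = document.get(field_paths=['name'])
if snapshot.exists:
    print(snapshot.to_dict())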