Code
stringlengths 103
85.9k
| Summary
sequencelengths 0
94
|
---|---|
Please provide a description of the function:def get_request_message(cls, remote_info): # pylint: disable=g-bad-name
if remote_info in cls.__remote_info_cache:
return cls.__remote_info_cache[remote_info]
else:
return remote_info.request_type() | [
"Gets request message or container from remote info.\n\n Args:\n remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding\n to a method.\n\n Returns:\n Either an instance of the request type from the remote or the\n ResourceContainer that was cached with the remote method.\n "
] |
Please provide a description of the function:def get_current_user():
if not _is_auth_info_available():
raise InvalidGetUserCall('No valid endpoints user in environment.')
if _ENDPOINTS_USER_INFO in os.environ:
user_info = os.environ[_ENDPOINTS_USER_INFO]
return users.User(user_info.email)
if _ENV_USE_OAUTH_SCOPE in os.environ:
# We can get more information from the oauth.get_current_user function,
# as long as we know what scope to use. Since that scope has been
# cached, we can just return this:
return oauth.get_current_user(os.environ[_ENV_USE_OAUTH_SCOPE].split())
if (_ENV_AUTH_EMAIL in os.environ and
_ENV_AUTH_DOMAIN in os.environ):
if not os.environ[_ENV_AUTH_EMAIL]:
# Either there was no id token or we were unable to validate it,
# so there's no user.
return None
return users.User(os.environ[_ENV_AUTH_EMAIL],
os.environ[_ENV_AUTH_DOMAIN] or None)
# Shouldn't hit this, because all the _is_auth_info_available cases were
# checked, but just in case.
return None | [
"Get user information from the id_token or oauth token in the request.\n\n This should only be called from within an Endpoints request handler,\n decorated with an @endpoints.method decorator. The decorator should include\n the https://www.googleapis.com/auth/userinfo.email scope.\n\n If `endpoints_management.control.wsgi.AuthenticationMiddleware` is enabled,\n this returns the user info decoded by the middleware. Otherwise, if the\n current request uses an id_token, this validates and parses the token against\n the info in the current request handler and returns the user. Or, for an\n Oauth token, this call validates the token against the tokeninfo endpoint and\n oauth.get_current_user with the scopes provided in the method's decorator.\n\n Returns:\n None if there is no token or it's invalid. If the token was valid, this\n returns a User. Only the user's email field is guaranteed to be set.\n Other fields may be empty.\n\n Raises:\n InvalidGetUserCall: if the environment variables necessary to determine the\n endpoints user are not set. These are typically set when processing a\n request using an Endpoints handler. If they are not set, it likely\n indicates that this function was called from outside an Endpoints request\n handler.\n "
] |
Please provide a description of the function:def _is_auth_info_available():
return (_ENDPOINTS_USER_INFO in os.environ or
(_ENV_AUTH_EMAIL in os.environ and _ENV_AUTH_DOMAIN in os.environ) or
_ENV_USE_OAUTH_SCOPE in os.environ) | [
"Check if user auth info has been set in environment variables."
] |
Please provide a description of the function:def _maybe_set_current_user_vars(method, api_info=None, request=None):
if _is_auth_info_available():
return
# By default, there's no user.
os.environ[_ENV_AUTH_EMAIL] = ''
os.environ[_ENV_AUTH_DOMAIN] = ''
# Choose settings on the method, if specified. Otherwise, choose settings
# from the API. Specifically check for None, so that methods can override
# with empty lists.
try:
api_info = api_info or method.im_self.api_info
except AttributeError:
# The most common case for this is someone passing an unbound method
# to this function, which most likely only happens in our unit tests.
# We could propagate the exception, but this results in some really
# difficult to debug behavior. Better to log a warning and pretend
# there are no API-level settings.
_logger.warning('AttributeError when accessing %s.im_self. An unbound '
'method was probably passed as an endpoints handler.',
method.__name__)
scopes = method.method_info.scopes
audiences = method.method_info.audiences
allowed_client_ids = method.method_info.allowed_client_ids
else:
scopes = (method.method_info.scopes
if method.method_info.scopes is not None
else api_info.scopes)
audiences = (method.method_info.audiences
if method.method_info.audiences is not None
else api_info.audiences)
allowed_client_ids = (method.method_info.allowed_client_ids
if method.method_info.allowed_client_ids is not None
else api_info.allowed_client_ids)
if not scopes and not audiences and not allowed_client_ids:
# The user hasn't provided any information to allow us to parse either
# an id_token or an Oauth token. They appear not to be interested in
# auth.
return
token = _get_token(request)
if not token:
return None
if allowed_client_ids and _is_local_dev():
allowed_client_ids = (constants.API_EXPLORER_CLIENT_ID,) + tuple(allowed_client_ids)
# When every item in the acceptable scopes list is
# "https://www.googleapis.com/auth/userinfo.email", and there is a non-empty
# allowed_client_ids list, the API code will first attempt OAuth 2/OpenID
# Connect ID token processing for any incoming bearer token.
if ((scopes == [_EMAIL_SCOPE] or scopes == (_EMAIL_SCOPE,)) and
allowed_client_ids):
_logger.debug('Checking for id_token.')
issuers = api_info.issuers
if issuers is None:
issuers = _DEFAULT_GOOGLE_ISSUER
elif 'google_id_token' not in issuers:
issuers.update(_DEFAULT_GOOGLE_ISSUER)
time_now = long(time.time())
user = _get_id_token_user(token, issuers, audiences, allowed_client_ids,
time_now, memcache)
if user:
os.environ[_ENV_AUTH_EMAIL] = user.email()
os.environ[_ENV_AUTH_DOMAIN] = user.auth_domain()
return
# Check if the user is interested in an oauth token.
if scopes:
_logger.debug('Checking for oauth token.')
if _is_local_dev():
_set_bearer_user_vars_local(token, allowed_client_ids, scopes)
else:
_set_bearer_user_vars(allowed_client_ids, scopes) | [
"Get user information from the id_token or oauth token in the request.\n\n Used internally by Endpoints to set up environment variables for user\n authentication.\n\n Args:\n method: The class method that's handling this request. This method\n should be annotated with @endpoints.method.\n api_info: An api_config._ApiInfo instance. Optional. If None, will attempt\n to parse api_info from the implicit instance of the method.\n request: The current request, or None.\n "
] |
Please provide a description of the function:def _get_token(
request=None, allowed_auth_schemes=('OAuth', 'Bearer'),
allowed_query_keys=('bearer_token', 'access_token')):
allowed_auth_schemes = _listlike_guard(
allowed_auth_schemes, 'allowed_auth_schemes', iterable_only=True)
# Check if the token is in the Authorization header.
auth_header = os.environ.get('HTTP_AUTHORIZATION')
if auth_header:
for auth_scheme in allowed_auth_schemes:
if auth_header.startswith(auth_scheme):
return auth_header[len(auth_scheme) + 1:]
# If an auth header was specified, even if it's an invalid one, we won't
# look for the token anywhere else.
return None
# Check if the token is in the query string.
if request:
allowed_query_keys = _listlike_guard(
allowed_query_keys, 'allowed_query_keys', iterable_only=True)
for key in allowed_query_keys:
token, _ = request.get_unrecognized_field_info(key)
if token:
return token | [
"Get the auth token for this request.\n\n Auth token may be specified in either the Authorization header or\n as a query param (either access_token or bearer_token). We'll check in\n this order:\n 1. Authorization header.\n 2. bearer_token query param.\n 3. access_token query param.\n\n Args:\n request: The current request, or None.\n\n Returns:\n The token in the request or None.\n "
] |
Please provide a description of the function:def _get_id_token_user(token, issuers, audiences, allowed_client_ids, time_now, cache):
# Verify that the token is valid before we try to extract anything from it.
# This verifies the signature and some of the basic info in the token.
for issuer_key, issuer in issuers.items():
issuer_cert_uri = convert_jwks_uri(issuer.jwks_uri)
try:
parsed_token = _verify_signed_jwt_with_certs(
token, time_now, cache, cert_uri=issuer_cert_uri)
except Exception: # pylint: disable=broad-except
_logger.debug(
'id_token verification failed for issuer %s', issuer_key, exc_info=True)
continue
issuer_values = _listlike_guard(issuer.issuer, 'issuer', log_warning=False)
if isinstance(audiences, _Mapping):
audiences = audiences[issuer_key]
if _verify_parsed_token(
parsed_token, issuer_values, audiences, allowed_client_ids,
# There's some special handling we do for Google issuers.
# ESP doesn't do this, and it's both unnecessary and invalid for other issuers.
# So we'll turn it off except in the Google issuer case.
is_legacy_google_auth=(issuer.issuer == _ISSUERS)):
email = parsed_token['email']
# The token might have an id, but it's a Gaia ID that's been
# obfuscated with the Focus key, rather than the AppEngine (igoogle)
# key. If the developer ever put this email into the user DB
# and retrieved the ID from that, it'd be different from the ID we'd
# return here, so it's safer to not return the ID.
# Instead, we'll only return the email.
return users.User(email) | [
"Get a User for the given id token, if the token is valid.\n\n Args:\n token: The id_token to check.\n issuers: dict of Issuers\n audiences: List of audiences that are acceptable.\n allowed_client_ids: List of client IDs that are acceptable.\n time_now: The current time as a long (eg. long(time.time())).\n cache: Cache to use (eg. the memcache module).\n\n Returns:\n A User if the token is valid, None otherwise.\n "
] |
Please provide a description of the function:def _process_scopes(scopes):
all_scopes = set()
sufficient_scopes = set()
for scope_set in scopes:
scope_set_scopes = frozenset(scope_set.split())
all_scopes.update(scope_set_scopes)
sufficient_scopes.add(scope_set_scopes)
return all_scopes, sufficient_scopes | [
"Parse a scopes list into a set of all scopes and a set of sufficient scope sets.\n\n scopes: A list of strings, each of which is a space-separated list of scopes.\n Examples: ['scope1']\n ['scope1', 'scope2']\n ['scope1', 'scope2 scope3']\n\n Returns:\n all_scopes: a set of strings, each of which is one scope to check for\n sufficient_scopes: a set of sets of strings; each inner set is\n a set of scopes which are sufficient for access.\n Example: {{'scope1'}, {'scope2', 'scope3'}}\n "
] |
Please provide a description of the function:def _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
for sufficient_scope_set in sufficient_scopes:
if sufficient_scope_set.issubset(authorized_scopes):
return True
return False | [
"Check if a list of authorized scopes satisfies any set of sufficient scopes.\n\n Args:\n authorized_scopes: a list of strings, return value from oauth.get_authorized_scopes\n sufficient_scopes: a set of sets of strings, return value from _process_scopes\n "
] |
Please provide a description of the function:def _set_bearer_user_vars(allowed_client_ids, scopes):
all_scopes, sufficient_scopes = _process_scopes(scopes)
try:
authorized_scopes = oauth.get_authorized_scopes(sorted(all_scopes))
except oauth.Error:
_logger.debug('Unable to get authorized scopes.', exc_info=True)
return
if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
_logger.warning('Authorized scopes did not satisfy scope requirements.')
return
client_id = oauth.get_client_id(authorized_scopes)
# The client ID must be in allowed_client_ids. If allowed_client_ids is
# empty, don't allow any client ID. If allowed_client_ids is set to
# SKIP_CLIENT_ID_CHECK, all client IDs will be allowed.
if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
client_id not in allowed_client_ids):
_logger.warning('Client ID is not allowed: %s', client_id)
return
os.environ[_ENV_USE_OAUTH_SCOPE] = ' '.join(authorized_scopes)
_logger.debug('get_current_user() will return user from matched oauth_user.') | [
"Validate the oauth bearer token and set endpoints auth user variables.\n\n If the bearer token is valid, this sets ENDPOINTS_USE_OAUTH_SCOPE. This\n provides enough information that our endpoints.get_current_user() function\n can get the user.\n\n Args:\n allowed_client_ids: List of client IDs that are acceptable.\n scopes: List of acceptable scopes.\n "
] |
Please provide a description of the function:def _set_bearer_user_vars_local(token, allowed_client_ids, scopes):
# Get token info from the tokeninfo endpoint.
result = urlfetch.fetch(
'%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token})))
if result.status_code != 200:
try:
error_description = json.loads(result.content)['error_description']
except (ValueError, KeyError):
error_description = ''
_logger.error('Token info endpoint returned status %s: %s',
result.status_code, error_description)
return
token_info = json.loads(result.content)
# Validate email.
if 'email' not in token_info:
_logger.warning('Oauth token doesn\'t include an email address.')
return
if token_info.get('email_verified') != 'true':
_logger.warning('Oauth token email isn\'t verified.')
return
# Validate client ID.
client_id = token_info.get('azp')
if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
client_id not in allowed_client_ids):
_logger.warning('Client ID is not allowed: %s', client_id)
return
# Verify at least one of the scopes matches.
_, sufficient_scopes = _process_scopes(scopes)
authorized_scopes = token_info.get('scope', '').split(' ')
if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
_logger.warning('Oauth token scopes don\'t match any acceptable scopes.')
return
os.environ[_ENV_AUTH_EMAIL] = token_info['email']
os.environ[_ENV_AUTH_DOMAIN] = ''
_logger.debug('Local dev returning user from token.') | [
"Validate the oauth bearer token on the dev server.\n\n Since the functions in the oauth module return only example results in local\n development, this hits the tokeninfo endpoint and attempts to validate the\n token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we\n can get the user from the token.\n\n Args:\n token: String with the oauth token to validate.\n allowed_client_ids: List of client IDs that are acceptable.\n scopes: List of acceptable scopes.\n "
] |
Please provide a description of the function:def _verify_parsed_token(parsed_token, issuers, audiences, allowed_client_ids, is_legacy_google_auth=True):
# Verify the issuer.
if parsed_token.get('iss') not in issuers:
_logger.warning('Issuer was not valid: %s', parsed_token.get('iss'))
return False
# Check audiences.
aud = parsed_token.get('aud')
if not aud:
_logger.warning('No aud field in token')
return False
# Special legacy handling if aud == cid. This occurs with iOS and browsers.
# As long as audience == client_id and cid is allowed, we need to accept
# the audience for compatibility.
cid = parsed_token.get('azp')
audience_allowed = (aud in audiences) or (is_legacy_google_auth and aud == cid)
if not audience_allowed:
_logger.warning('Audience not allowed: %s', aud)
return False
# Check allowed client IDs, for legacy auth.
if is_legacy_google_auth:
if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK:
_logger.warning('Client ID check can\'t be skipped for ID tokens. '
'Id_token cannot be verified.')
return False
elif not cid or cid not in allowed_client_ids:
_logger.warning('Client ID is not allowed: %s', cid)
return False
if 'email' not in parsed_token:
return False
return True | [
"Verify a parsed user ID token.\n\n Args:\n parsed_token: The parsed token information.\n issuers: A list of allowed issuers\n audiences: The allowed audiences.\n allowed_client_ids: The allowed client IDs.\n\n Returns:\n True if the token is verified, False otherwise.\n "
] |
Please provide a description of the function:def _get_cert_expiration_time(headers):
# Check the max age of the cert.
cache_control = headers.get('Cache-Control', '')
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 indicates only
# a comma-separated header is valid, so it should be fine to split this on
# commas.
for entry in cache_control.split(','):
match = _MAX_AGE_REGEX.match(entry)
if match:
cache_time_seconds = int(match.group(1))
break
else:
return 0
# Subtract the cert's age.
age = headers.get('Age')
if age is not None:
try:
age = int(age)
except ValueError:
age = 0
cache_time_seconds -= age
return max(0, cache_time_seconds) | [
"Get the expiration time for a cert, given the response headers.\n\n Get expiration time from the headers in the result. If we can't get\n a time from the headers, this returns 0, indicating that the cert\n shouldn't be cached.\n\n Args:\n headers: A dict containing the response headers from the request to get\n certs.\n\n Returns:\n An integer with the number of seconds the cert should be cached. This\n value is guaranteed to be >= 0.\n "
] |
Please provide a description of the function:def _get_cached_certs(cert_uri, cache):
certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE)
if certs is None:
_logger.debug('Cert cache miss for %s', cert_uri)
try:
result = urlfetch.fetch(cert_uri)
except AssertionError:
# This happens in unit tests. Act as if we couldn't get any certs.
return None
if result.status_code == 200:
certs = json.loads(result.content)
expiration_time_seconds = _get_cert_expiration_time(result.headers)
if expiration_time_seconds:
cache.set(cert_uri, certs, time=expiration_time_seconds,
namespace=_CERT_NAMESPACE)
else:
_logger.error(
'Certs not available, HTTP request returned %d', result.status_code)
return certs | [
"Get certs from cache if present; otherwise, gets from URI and caches them.\n\n Args:\n cert_uri: URI from which to retrieve certs if cache is stale or empty.\n cache: Cache of pre-fetched certs.\n\n Returns:\n The retrieved certs.\n "
] |
Please provide a description of the function:def _verify_signed_jwt_with_certs(
jwt, time_now, cache,
cert_uri=_DEFAULT_CERT_URI):
segments = jwt.split('.')
if len(segments) != 3:
# Note that anywhere we print the jwt or its json body, we need to use
# %r instead of %s, so that non-printable characters are escaped safely.
raise _AppIdentityError('Token is not an id_token (Wrong number of '
'segments)')
signed = '%s.%s' % (segments[0], segments[1])
signature = _urlsafe_b64decode(segments[2])
# pycrypto only deals in integers, so we have to convert the string of bytes
# into a long.
lsignature = long(signature.encode('hex'), 16)
# Verify expected header.
header_body = _urlsafe_b64decode(segments[0])
try:
header = json.loads(header_body)
except:
raise _AppIdentityError("Can't parse header")
if header.get('alg') != 'RS256':
raise _AppIdentityError('Unexpected encryption algorithm: %r' %
header.get('alg'))
# Formerly we would parse the token body here.
# However, it's not safe to do that without first checking the signature.
certs = _get_cached_certs(cert_uri, cache)
if certs is None:
raise _AppIdentityError(
'Unable to retrieve certs needed to verify the signed JWT')
# Verify that we were able to load the Crypto libraries, before we try
# to use them.
if not _CRYPTO_LOADED:
raise _AppIdentityError('Unable to load pycrypto library. Can\'t verify '
'id_token signature. See http://www.pycrypto.org '
'for more information on pycrypto.')
# SHA256 hash of the already 'signed' segment from the JWT. Since a SHA256
# hash, will always have length 64.
local_hash = SHA256.new(signed).hexdigest()
# Check signature.
verified = False
for keyvalue in certs['keyvalues']:
try:
modulus = _b64_to_long(keyvalue['modulus'])
exponent = _b64_to_long(keyvalue['exponent'])
key = RSA.construct((modulus, exponent))
# Encrypt, and convert to a hex string.
hexsig = '%064x' % key.encrypt(lsignature, '')[0]
# Make sure we have only last 64 base64 chars
hexsig = hexsig[-64:]
# Check the signature on 'signed' by encrypting 'signature' with the
# public key and confirming the result matches the SHA256 hash of
# 'signed'. hmac.compare_digest(a, b) is used to avoid timing attacks.
verified = hmac.compare_digest(hexsig, local_hash)
if verified:
break
except Exception, e: # pylint: disable=broad-except
# Log the exception for debugging purpose.
_logger.debug(
'Signature verification error: %s; continuing with the next cert.', e)
continue
if not verified:
raise _AppIdentityError('Invalid token signature')
# Parse token.
json_body = _urlsafe_b64decode(segments[1])
try:
parsed = json.loads(json_body)
except:
raise _AppIdentityError("Can't parse token body")
# Check creation timestamp.
iat = parsed.get('iat')
if iat is None:
raise _AppIdentityError('No iat field in token')
earliest = iat - _CLOCK_SKEW_SECS
# Check expiration timestamp.
exp = parsed.get('exp')
if exp is None:
raise _AppIdentityError('No exp field in token')
if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS:
raise _AppIdentityError('exp field too far in future')
latest = exp + _CLOCK_SKEW_SECS
if time_now < earliest:
raise _AppIdentityError('Token used too early, %d < %d' %
(time_now, earliest))
if time_now > latest:
raise _AppIdentityError('Token used too late, %d > %d' %
(time_now, latest))
return parsed | [
"Verify a JWT against public certs.\n\n See http://self-issued.info/docs/draft-jones-json-web-token.html.\n\n The PyCrypto library included with Google App Engine is severely limited and\n so you have to use it very carefully to verify JWT signatures. The first\n issue is that the library can't read X.509 files, so we make a call to a\n special URI that has the public cert in modulus/exponent form in JSON.\n\n The second issue is that the RSA.verify method doesn't work, at least for\n how the JWT tokens are signed, so we have to manually verify the signature\n of the JWT, which means hashing the signed part of the JWT and comparing\n that to the signature that's been encrypted with the public key.\n\n Args:\n jwt: string, A JWT.\n time_now: The current time, as a long (eg. long(time.time())).\n cache: Cache to use (eg. the memcache module).\n cert_uri: string, URI to get cert modulus and exponent in JSON format.\n\n Returns:\n dict, The deserialized JSON payload in the JWT.\n\n Raises:\n _AppIdentityError: if any checks are failed.\n "
] |
Please provide a description of the function:def convert_jwks_uri(jwks_uri):
if not jwks_uri.startswith(_TEXT_CERT_PREFIX):
return jwks_uri
return jwks_uri.replace(_TEXT_CERT_PREFIX, _JSON_CERT_PREFIX) | [
"\n The PyCrypto library included with Google App Engine is severely limited and\n can't read X.509 files, so we change the URI to a special URI that has the\n public cert in modulus/exponent form in JSON.\n "
] |
Please provide a description of the function:def get_verified_jwt(
providers, audiences,
check_authorization_header=True, check_query_arg=True,
request=None, cache=memcache):
if not (check_authorization_header or check_query_arg):
raise ValueError(
'Either check_authorization_header or check_query_arg must be True.')
if check_query_arg and request is None:
raise ValueError(
'Cannot check query arg without request object.')
schemes = ('Bearer',) if check_authorization_header else ()
keys = ('access_token',) if check_query_arg else ()
token = _get_token(
request=request, allowed_auth_schemes=schemes, allowed_query_keys=keys)
if token is None:
return None
time_now = long(time.time())
for provider in providers:
parsed_token = _parse_and_verify_jwt(
token, time_now, (provider['issuer'],), audiences, provider['cert_uri'], cache)
if parsed_token is not None:
return parsed_token
return None | [
"\n This function will extract, verify, and parse a JWT token from the\n Authorization header or access_token query argument.\n\n The JWT is assumed to contain an issuer and audience claim, as well\n as issued-at and expiration timestamps. The signature will be\n cryptographically verified, the claims and timestamps will be\n checked, and the resulting parsed JWT body is returned.\n\n If at any point the JWT is missing or found to be invalid, the\n return result will be None.\n\n Arguments:\n providers - An iterable of dicts each containing 'issuer' and 'cert_uri' keys\n audiences - An iterable of valid audiences\n\n check_authorization_header - Boolean; check 'Authorization: Bearer' header\n check_query_arg - Boolean; check 'access_token' query arg\n\n request - Must be the request object if check_query_arg is true; otherwise ignored.\n cache - In testing, override the certificate cache\n "
] |
Please provide a description of the function:def _listlike_guard(obj, name, iterable_only=False, log_warning=True):
required_type = (_Iterable,) if iterable_only else (_Container, _Iterable)
required_type_name = ' or '.join(t.__name__ for t in required_type)
if not isinstance(obj, required_type):
raise ValueError('{} must be of type {}'.format(name, required_type_name))
# at this point it is definitely the right type, but might be a string
if isinstance(obj, basestring):
if log_warning:
_logger.warning('{} passed as a string; should be list-like'.format(name))
return (obj,)
return obj | [
"\n We frequently require passed objects to support iteration or\n containment expressions, but not be strings. (Of course, strings\n support iteration and containment, but not usefully.) If the passed\n object is a string, we'll wrap it in a tuple and return it. If it's\n already an iterable, we'll return it as-is. Otherwise, we'll raise a\n TypeError.\n "
] |
Please provide a description of the function:def __item_descriptor(self, config):
descriptor = {
'kind': 'discovery#directoryItem',
'icons': {
'x16': 'https://www.gstatic.com/images/branding/product/1x/'
'googleg_16dp.png',
'x32': 'https://www.gstatic.com/images/branding/product/1x/'
'googleg_32dp.png',
},
'preferred': True,
}
description = config.get('description')
root_url = config.get('root')
name = config.get('name')
version = config.get('api_version')
relative_path = '/apis/{0}/{1}/rest'.format(name, version)
if description:
descriptor['description'] = description
descriptor['name'] = name
descriptor['version'] = version
descriptor['discoveryLink'] = '.{0}'.format(relative_path)
root_url_port = urlparse.urlparse(root_url).port
original_path = self.__request.reconstruct_full_url(
port_override=root_url_port)
descriptor['discoveryRestUrl'] = '{0}/{1}/{2}/rest'.format(
original_path, name, version)
if name and version:
descriptor['id'] = '{0}:{1}'.format(name, version)
return descriptor | [
"Builds an item descriptor for a service configuration.\n\n Args:\n config: A dictionary containing the service configuration to describe.\n\n Returns:\n A dictionary that describes the service configuration.\n "
] |
Please provide a description of the function:def __directory_list_descriptor(self, configs):
descriptor = {
'kind': 'discovery#directoryList',
'discoveryVersion': 'v1',
}
items = []
for config in configs:
item_descriptor = self.__item_descriptor(config)
if item_descriptor:
items.append(item_descriptor)
if items:
descriptor['items'] = items
return descriptor | [
"Builds a directory list for an API.\n\n Args:\n configs: List of dicts containing the service configurations to list.\n\n Returns:\n A dictionary that can be deserialized into JSON in discovery list format.\n\n Raises:\n ApiConfigurationError: If there's something wrong with the API\n configuration, such as a multiclass API decorated with different API\n descriptors (see the docstring for api()), or a repeated method\n signature.\n "
] |
Please provide a description of the function:def get_directory_list_doc(self, configs):
if not isinstance(configs, (tuple, list)):
configs = [configs]
util.check_list_type(configs, dict, 'configs', allow_none=False)
return self.__directory_list_descriptor(configs) | [
"JSON dict description of a protorpc.remote.Service in list format.\n\n Args:\n configs: Either a single dict or a list of dicts containing the service\n configurations to list.\n\n Returns:\n dict, The directory list document as a JSON dict.\n "
] |
Please provide a description of the function:def pretty_print_config_to_json(self, configs):
descriptor = self.get_directory_list_doc(configs)
return json.dumps(descriptor, sort_keys=True, indent=2,
separators=(',', ': ')) | [
"JSON string description of a protorpc.remote.Service in a discovery doc.\n\n Args:\n configs: Either a single dict or a list of dicts containing the service\n configurations to list.\n\n Returns:\n string, The directory list document as a JSON string.\n "
] |
Please provide a description of the function:def __format_error(self, error_list_tag):
error = {'domain': self.domain(),
'reason': self.reason(),
'message': self.message()}
error.update(self.extra_fields() or {})
return {'error': {error_list_tag: [error],
'code': self.status_code(),
'message': self.message()}} | [
"Format this error into a JSON response.\n\n Args:\n error_list_tag: A string specifying the name of the tag to use for the\n error list.\n\n Returns:\n A dict containing the reformatted JSON error response.\n "
] |
Please provide a description of the function:def rest_error(self):
error_json = self.__format_error('errors')
return json.dumps(error_json, indent=1, sort_keys=True) | [
"Format this error into a response to a REST request.\n\n Returns:\n A string containing the reformatted error response.\n "
] |
Please provide a description of the function:def _get_status_code(self, http_status):
try:
return int(http_status.split(' ', 1)[0])
except TypeError:
_logger.warning('Unable to find status code in HTTP status %r.',
http_status)
return 500 | [
"Get the HTTP status code from an HTTP status string.\n\n Args:\n http_status: A string containing a HTTP status code and reason.\n\n Returns:\n An integer with the status code number from http_status.\n "
] |
Please provide a description of the function:def process_api_config_response(self, config_json):
with self._config_lock:
self._add_discovery_config()
for config in config_json.get('items', []):
lookup_key = config.get('name', ''), config.get('version', '')
self._configs[lookup_key] = config
for config in self._configs.itervalues():
name = config.get('name', '')
api_version = config.get('api_version', '')
path_version = config.get('path_version', '')
sorted_methods = self._get_sorted_methods(config.get('methods', {}))
for method_name, method in sorted_methods:
self._save_rest_method(method_name, name, path_version, method) | [
"Parses a JSON API config and registers methods for dispatch.\n\n Side effects:\n Parses method name, etc. for all methods and updates the indexing\n data structures with the information.\n\n Args:\n config_json: A dict, the JSON body of the getApiConfigs response.\n "
] |
Please provide a description of the function:def _get_sorted_methods(self, methods):
if not methods:
return methods
# Comparison function we'll use to sort the methods:
def _sorted_methods_comparison(method_info1, method_info2):
def _score_path(path):
score = 0
parts = path.split('/')
for part in parts:
score <<= 1
if not part or part[0] != '{':
# Found a constant.
score += 1
# Shift by 31 instead of 32 because some (!) versions of Python like
# to convert the int to a long if we shift by 32, and the sorted()
# function that uses this blows up if it receives anything but an int.
score <<= 31 - len(parts)
return score
# Higher path scores come first.
path_score1 = _score_path(method_info1[1].get('path', ''))
path_score2 = _score_path(method_info2[1].get('path', ''))
if path_score1 != path_score2:
return path_score2 - path_score1
# Compare by path text next, sorted alphabetically.
path_result = cmp(method_info1[1].get('path', ''),
method_info2[1].get('path', ''))
if path_result != 0:
return path_result
# All else being equal, sort by HTTP method.
method_result = cmp(method_info1[1].get('httpMethod', ''),
method_info2[1].get('httpMethod', ''))
return method_result
return sorted(methods.items(), _sorted_methods_comparison) | [
"Get a copy of 'methods' sorted the way they would be on the live server.\n\n Args:\n methods: JSON configuration of an API's methods.\n\n Returns:\n The same configuration with the methods sorted based on what order\n they'll be checked by the server.\n ",
"Sort method info by path and http_method.\n\n Args:\n method_info1: Method name and info for the first method to compare.\n method_info2: Method name and info for the method to compare to.\n\n Returns:\n Negative if the first method should come first, positive if the\n first method should come after the second. Zero if they're\n equivalent.\n ",
"Calculate the score for this path, used for comparisons.\n\n Higher scores have priority, and if scores are equal, the path text\n is sorted alphabetically. Scores are based on the number and location\n of the constant parts of the path. The server has some special handling\n for variables with regexes, which we don't handle here.\n\n Args:\n path: The request path that we're calculating a score for.\n\n Returns:\n The score for the given path.\n "
] |
Please provide a description of the function:def _get_path_params(match):
result = {}
for var_name, value in match.groupdict().iteritems():
actual_var_name = ApiConfigManager._from_safe_path_param_name(var_name)
result[actual_var_name] = urllib.unquote_plus(value)
return result | [
"Gets path parameters from a regular expression match.\n\n Args:\n match: A regular expression Match object for a path.\n\n Returns:\n A dictionary containing the variable names converted from base64.\n "
] |
Please provide a description of the function:def lookup_rest_method(self, path, request_uri, http_method):
method_key = http_method.lower()
with self._config_lock:
for compiled_path_pattern, unused_path, methods in self._rest_methods:
if method_key not in methods:
continue
candidate_method_info = methods[method_key]
match_against = request_uri if candidate_method_info[1].get('useRequestUri') else path
match = compiled_path_pattern.match(match_against)
if match:
params = self._get_path_params(match)
method_name, method = candidate_method_info
break
else:
_logger.warn('No endpoint found for path: %r, method: %r', path, http_method)
method_name = None
method = None
params = None
return method_name, method, params | [
"Look up the rest method at call time.\n\n The method is looked up in self._rest_methods, the list it is saved\n in for SaveRestMethod.\n\n Args:\n path: A string containing the path from the URL of the request.\n http_method: A string containing HTTP method of the request.\n\n Returns:\n Tuple of (<method name>, <method>, <params>)\n Where:\n <method name> is the string name of the method that was matched.\n <method> is the descriptor as specified in the API configuration. -and-\n <params> is a dict of path parameters matched in the rest request.\n "
] |
Please provide a description of the function:def _add_discovery_config(self):
lookup_key = (discovery_service.DiscoveryService.API_CONFIG['name'],
discovery_service.DiscoveryService.API_CONFIG['version'])
self._configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG | [
"Add the Discovery configuration to our list of configs.\n\n This should only be called with self._config_lock. The code here assumes\n the lock is held.\n "
] |
Please provide a description of the function:def save_config(self, lookup_key, config):
with self._config_lock:
self._configs[lookup_key] = config | [
"Save a configuration to the cache of configs.\n\n Args:\n lookup_key: A string containing the cache lookup key.\n config: The dict containing the configuration to save to the cache.\n "
] |
Please provide a description of the function:def _from_safe_path_param_name(safe_parameter):
assert safe_parameter.startswith('_')
safe_parameter_as_base32 = safe_parameter[1:]
padding_length = - len(safe_parameter_as_base32) % 8
padding = '=' * padding_length
return base64.b32decode(safe_parameter_as_base32 + padding) | [
"Takes a safe regex group name and converts it back to the original value.\n\n Only alphanumeric characters and underscore are allowed in variable name\n tokens, and numeric are not allowed as the first character.\n\n The safe_parameter is a base32 representation of the actual value.\n\n Args:\n safe_parameter: A string that was generated by _to_safe_path_param_name.\n\n Returns:\n A string, the parameter matched from the URL template.\n "
] |
Please provide a description of the function:def _compile_path_pattern(pattern):
r
def replace_variable(match):
if match.lastindex > 1:
var_name = ApiConfigManager._to_safe_path_param_name(match.group(2))
return '%s(?P<%s>%s)' % (match.group(1), var_name,
_PATH_VALUE_PATTERN)
return match.group(0)
pattern = re.sub('(/|^){(%s)}(?=/|$|:)' % _PATH_VARIABLE_PATTERN,
replace_variable, pattern)
return re.compile(pattern + '/?$') | [
"Generates a compiled regex pattern for a path pattern.\n\n e.g. '/MyApi/v1/notes/{id}'\n returns re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\\[\\]{}]*)')\n\n Args:\n pattern: A string, the parameterized path pattern to be checked.\n\n Returns:\n A compiled regex object to match this path pattern.\n ",
"Replaces a {variable} with a regex to match it by name.\n\n Changes the string corresponding to the variable name to the base32\n representation of the string, prepended by an underscore. This is\n necessary because we can have message variable names in URL patterns\n (e.g. via {x.y}) but the character '.' can't be in a regex group name.\n\n Args:\n match: A regex match object, the matching regex group as sent by\n re.sub().\n\n Returns:\n A string regex to match the variable by name, if the full pattern was\n matched.\n "
] |
Please provide a description of the function:def _save_rest_method(self, method_name, api_name, version, method):
path_pattern = '/'.join((api_name, version, method.get('path', '')))
http_method = method.get('httpMethod', '').lower()
for _, path, methods in self._rest_methods:
if path == path_pattern:
methods[http_method] = method_name, method
break
else:
self._rest_methods.append(
(self._compile_path_pattern(path_pattern),
path_pattern,
{http_method: (method_name, method)})) | [
"Store Rest api methods in a list for lookup at call time.\n\n The list is self._rest_methods, a list of tuples:\n [(<compiled_path>, <path_pattern>, <method_dict>), ...]\n where:\n <compiled_path> is a compiled regex to match against the incoming URL\n <path_pattern> is a string representing the original path pattern,\n checked on insertion to prevent duplicates. -and-\n <method_dict> is a dict of httpMethod => (method_name, method)\n\n This structure is a bit complex, it supports use in two contexts:\n Creation time:\n - SaveRestMethod is called repeatedly, each method will have a path,\n which we want to be compiled for fast lookup at call time\n - We want to prevent duplicate incoming path patterns, so store the\n un-compiled path, not counting on a compiled regex being a stable\n comparison as it is not documented as being stable for this use.\n - Need to store the method that will be mapped at calltime.\n - Different methods may have the same path but different http method.\n Call time:\n - Quickly scan through the list attempting .match(path) on each\n compiled regex to find the path that matches.\n - When a path is matched, look up the API method from the request\n and get the method name and method config for the matching\n API method and method name.\n\n Args:\n method_name: A string containing the name of the API method.\n api_name: A string containing the name of the API.\n version: A string containing the version of the API.\n method: A dict containing the method descriptor (as in the api config\n file).\n "
] |
Please provide a description of the function:def api_server(api_services, **kwargs):
# Disallow protocol configuration for now, Lily is json-only.
if 'protocols' in kwargs:
raise TypeError("__init__() got an unexpected keyword argument 'protocols'")
from . import _logger as endpoints_logger
from . import __version__ as endpoints_version
endpoints_logger.info('Initializing Endpoints Framework version %s', endpoints_version)
# Construct the api serving app
apis_app = _ApiServer(api_services, **kwargs)
dispatcher = endpoints_dispatcher.EndpointsDispatcherMiddleware(apis_app)
# Determine the service name
service_name = os.environ.get('ENDPOINTS_SERVICE_NAME')
if not service_name:
_logger.warn('Did not specify the ENDPOINTS_SERVICE_NAME environment'
' variable so service control is disabled. Please specify'
' the name of service in ENDPOINTS_SERVICE_NAME to enable'
' it.')
return dispatcher
# If we're using a local server, just return the dispatcher now to bypass
# control client.
if control_wsgi.running_on_devserver():
_logger.warn('Running on local devserver, so service control is disabled.')
return dispatcher
from endpoints_management import _logger as management_logger
from endpoints_management import __version__ as management_version
management_logger.info('Initializing Endpoints Management Framework version %s', management_version)
# The DEFAULT 'config' should be tuned so that it's always OK for python
# App Engine workloads. The config can be adjusted, but that's probably
# unnecessary on App Engine.
controller = control_client.Loaders.DEFAULT.load(service_name)
# Start the GAE background thread that powers the control client's cache.
control_client.use_gae_thread()
controller.start()
return control_wsgi.add_all(
dispatcher,
app_identity.get_application_id(),
controller) | [
"Create an api_server.\n\n The primary function of this method is to set up the WSGIApplication\n instance for the service handlers described by the services passed in.\n Additionally, it registers each API in ApiConfigRegistry for later use\n in the BackendService.getApiConfigs() (API config enumeration service).\n It also configures service control.\n\n Args:\n api_services: List of protorpc.remote.Service classes implementing the API\n or a list of _ApiDecorator instances that decorate the service classes\n for an API.\n **kwargs: Passed through to protorpc.wsgi.service.service_handlers except:\n protocols - ProtoRPC protocols are not supported, and are disallowed.\n\n Returns:\n A new WSGIApplication that serves the API backend and config registry.\n\n Raises:\n TypeError: if protocols are configured (this feature is not supported).\n "
] |
Please provide a description of the function:def register_backend(self, config_contents):
if config_contents is None:
return
self.__register_class(config_contents)
self.__api_configs.append(config_contents)
self.__register_methods(config_contents) | [
"Register a single API and its config contents.\n\n Args:\n config_contents: Dict containing API configuration.\n "
] |
Please provide a description of the function:def __register_class(self, parsed_config):
methods = parsed_config.get('methods')
if not methods:
return
# Determine the name of the class that implements this configuration.
service_classes = set()
for method in methods.itervalues():
rosy_method = method.get('rosyMethod')
if rosy_method and '.' in rosy_method:
method_class = rosy_method.split('.', 1)[0]
service_classes.add(method_class)
for service_class in service_classes:
if service_class in self.__registered_classes:
raise api_exceptions.ApiConfigurationError(
'API class %s has already been registered.' % service_class)
self.__registered_classes.add(service_class) | [
"Register the class implementing this config, so we only add it once.\n\n Args:\n parsed_config: The JSON object with the API configuration being added.\n\n Raises:\n ApiConfigurationError: If the class has already been registered.\n "
] |
Please provide a description of the function:def __register_methods(self, parsed_config):
methods = parsed_config.get('methods')
if not methods:
return
for method_name, method in methods.iteritems():
self.__api_methods[method_name] = method.get('rosyMethod') | [
"Register all methods from the given api config file.\n\n Methods are stored in a map from method_name to rosyMethod,\n the name of the ProtoRPC method to be called on the backend.\n If no rosyMethod was specified the value will be None.\n\n Args:\n parsed_config: The JSON object with the API configuration being added.\n "
] |
Please provide a description of the function:def __create_name_version_map(api_services):
api_name_version_map = {}
for service_factory in api_services:
try:
service_class = service_factory.service_class
except AttributeError:
service_class = service_factory
service_factory = service_class.new_factory()
key = service_class.api_info.name, service_class.api_info.api_version
service_factories = api_name_version_map.setdefault(key, [])
if service_factory in service_factories:
raise api_config.ApiConfigurationError(
'Can\'t add the same class to an API twice: %s' %
service_factory.service_class.__name__)
service_factories.append(service_factory)
return api_name_version_map | [
"Create a map from API name/version to Service class/factory.\n\n This creates a map from an API name and version to a list of remote.Service\n factories that implement that API.\n\n Args:\n api_services: A list of remote.Service-derived classes or factories\n created with remote.Service.new_factory.\n\n Returns:\n A mapping from (api name, api version) to a list of service factories,\n for service classes that implement that API.\n\n Raises:\n ApiConfigurationError: If a Service class appears more than once\n in api_services.\n "
] |
Please provide a description of the function:def __register_services(api_name_version_map, api_config_registry):
generator = api_config.ApiConfigGenerator()
protorpc_services = []
for service_factories in api_name_version_map.itervalues():
service_classes = [service_factory.service_class
for service_factory in service_factories]
config_dict = generator.get_config_dict(service_classes)
api_config_registry.register_backend(config_dict)
for service_factory in service_factories:
protorpc_class_name = service_factory.service_class.__name__
root = '%s%s' % (service_factory.service_class.api_info.base_path,
protorpc_class_name)
if any(service_map[0] == root or service_map[1] == service_factory
for service_map in protorpc_services):
raise api_config.ApiConfigurationError(
'Can\'t reuse the same class in multiple APIs: %s' %
protorpc_class_name)
protorpc_services.append((root, service_factory))
return protorpc_services | [
"Register & return a list of each URL and class that handles that URL.\n\n This finds every service class in api_name_version_map, registers it with\n the given ApiConfigRegistry, builds the URL for that class, and adds\n the URL and its factory to a list that's returned.\n\n Args:\n api_name_version_map: A mapping from (api name, api version) to a list of\n service factories, as returned by __create_name_version_map.\n api_config_registry: The ApiConfigRegistry where service classes will\n be registered.\n\n Returns:\n A list of (URL, service_factory) for each service class in\n api_name_version_map.\n\n Raises:\n ApiConfigurationError: If a Service class appears more than once\n in api_name_version_map. This could happen if one class is used to\n implement multiple APIs.\n "
] |
Please provide a description of the function:def __is_json_error(self, status, headers):
content_header = headers.get('content-type', '')
content_type, unused_params = cgi.parse_header(content_header)
return (status.startswith('400') and
content_type.lower() in _ALL_JSON_CONTENT_TYPES) | [
"Determine if response is an error.\n\n Args:\n status: HTTP status code.\n headers: Dictionary of (lowercase) header name to value.\n\n Returns:\n True if the response was an error, else False.\n "
] |
Please provide a description of the function:def __write_error(self, status_code, error_message=None):
if error_message is None:
error_message = httplib.responses[status_code]
status = '%d %s' % (status_code, httplib.responses[status_code])
message = EndpointsErrorMessage(
state=EndpointsErrorMessage.State.APPLICATION_ERROR,
error_message=error_message)
return status, self.__PROTOJSON.encode_message(message) | [
"Return the HTTP status line and body for a given error code and message.\n\n Args:\n status_code: HTTP status code to be returned.\n error_message: Error message to be returned.\n\n Returns:\n Tuple (http_status, body):\n http_status: HTTP status line, e.g. 200 OK.\n body: Body of the HTTP request.\n "
] |
Please provide a description of the function:def protorpc_to_endpoints_error(self, status, body):
try:
rpc_error = self.__PROTOJSON.decode_message(remote.RpcStatus, body)
except (ValueError, messages.ValidationError):
rpc_error = remote.RpcStatus()
if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR:
# Try to map to HTTP error code.
error_class = _ERROR_NAME_MAP.get(rpc_error.error_name)
if error_class:
status, body = self.__write_error(error_class.http_status,
rpc_error.error_message)
return status, body | [
"Convert a ProtoRPC error to the format expected by Google Endpoints.\n\n If the body does not contain an ProtoRPC message in state APPLICATION_ERROR\n the status and body will be returned unchanged.\n\n Args:\n status: HTTP status of the response from the backend\n body: JSON-encoded error in format expected by Endpoints frontend.\n\n Returns:\n Tuple of (http status, body)\n "
] |
Please provide a description of the function:def _add_dispatcher(self, path_regex, dispatch_function):
self._dispatchers.append((re.compile(path_regex), dispatch_function)) | [
"Add a request path and dispatch handler.\n\n Args:\n path_regex: A string regex, the path to match against incoming requests.\n dispatch_function: The function to call for these requests. The function\n should take (request, start_response) as arguments and\n return the contents of the response body.\n "
] |
Please provide a description of the function:def dispatch(self, request, start_response):
# Check if this matches any of our special handlers.
dispatched_response = self.dispatch_non_api_requests(request,
start_response)
if dispatched_response is not None:
return dispatched_response
# Call the service.
try:
return self.call_backend(request, start_response)
except errors.RequestError as error:
return self._handle_request_error(request, error, start_response) | [
"Handles dispatch to apiserver handlers.\n\n This typically ends up calling start_response and returning the entire\n body of the response.\n\n Args:\n request: An ApiRequest, the request from the user.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string, the body of the response.\n "
] |
Please provide a description of the function:def dispatch_non_api_requests(self, request, start_response):
for path_regex, dispatch_function in self._dispatchers:
if path_regex.match(request.relative_url):
return dispatch_function(request, start_response)
if request.http_method == 'OPTIONS':
cors_handler = self._create_cors_handler(request)
if cors_handler.allow_cors_request:
# The server returns 200 rather than 204, for some reason.
return util.send_wsgi_response('200', [], '', start_response,
cors_handler)
return None | [
"Dispatch this request if this is a request to a reserved URL.\n\n If the request matches one of our reserved URLs, this calls\n start_response and returns the response body. This also handles OPTIONS\n CORS requests.\n\n Args:\n request: An ApiRequest, the request from the user.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n None if the request doesn't match one of the reserved URLs this\n handles. Otherwise, returns the response body.\n "
] |
Please provide a description of the function:def handle_api_explorer_request(self, request, start_response):
redirect_url = self._get_explorer_redirect_url(
request.server, request.port, request.base_path)
return util.send_wsgi_redirect_response(redirect_url, start_response) | [
"Handler for requests to {base_path}/explorer.\n\n This calls start_response and returns the response body.\n\n Args:\n request: An ApiRequest, the request from the user.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string containing the response body (which is empty, in this case).\n "
] |
Please provide a description of the function:def handle_api_static_request(self, request, start_response):
if request.path == PROXY_PATH:
return util.send_wsgi_response('200 OK',
[('Content-Type',
'text/html')],
PROXY_HTML, start_response)
else:
_logger.debug('Unknown static url requested: %s',
request.relative_url)
return util.send_wsgi_response('404 Not Found', [('Content-Type',
'text/plain')], 'Not Found',
start_response) | [
"Handler for requests to {base_path}/static/.*.\n\n This calls start_response and returns the response body.\n\n Args:\n request: An ApiRequest, the request from the user.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string containing the response body.\n "
] |
Please provide a description of the function:def verify_response(response, status_code, content_type=None):
status = int(response.status.split(' ', 1)[0])
if status != status_code:
return False
if content_type is None:
return True
for header, value in response.headers:
if header.lower() == 'content-type':
return value == content_type
# If we fall through to here, the verification has failed, so return False.
return False | [
"Verifies that a response has the expected status and content type.\n\n Args:\n response: The ResponseTuple to be checked.\n status_code: An int, the HTTP status code to be compared with response\n status.\n content_type: A string with the acceptable Content-Type header value.\n None allows any content type.\n\n Returns:\n True if both status_code and content_type match, else False.\n "
] |
Please provide a description of the function:def prepare_backend_environ(self, host, method, relative_url, headers, body,
source_ip, port):
if isinstance(body, unicode):
body = body.encode('ascii')
url = urlparse.urlsplit(relative_url)
if port != 80:
host = '%s:%s' % (host, port)
else:
host = host
environ = {'CONTENT_LENGTH': str(len(body)),
'PATH_INFO': url.path,
'QUERY_STRING': url.query,
'REQUEST_METHOD': method,
'REMOTE_ADDR': source_ip,
'SERVER_NAME': host,
'SERVER_PORT': str(port),
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.errors': cStringIO.StringIO(),
'wsgi.multithread': True,
'wsgi.multiprocess': True,
'wsgi.input': cStringIO.StringIO(body)}
util.put_headers_in_environ(headers, environ)
environ['HTTP_HOST'] = host
return environ | [
"Build an environ object for the backend to consume.\n\n Args:\n host: A string containing the host serving the request.\n method: A string containing the HTTP method of the request.\n relative_url: A string containing path and query string of the request.\n headers: A list of (key, value) tuples where key and value are both\n strings.\n body: A string containing the request body.\n source_ip: The source IP address for the request.\n port: The port to which to direct the request.\n\n Returns:\n An environ object with all the information necessary for the backend to\n process the request.\n "
] |
Please provide a description of the function:def call_backend(self, orig_request, start_response):
method_config, params = self.lookup_rest_method(orig_request)
if not method_config:
cors_handler = self._create_cors_handler(orig_request)
return util.send_wsgi_not_found_response(start_response,
cors_handler=cors_handler)
# Prepare the request for the back end.
transformed_request = self.transform_request(
orig_request, params, method_config)
# Check if this call is for the Discovery service. If so, route
# it to our Discovery handler.
discovery = discovery_service.DiscoveryService(
self.config_manager, self._backend)
discovery_response = discovery.handle_discovery_request(
transformed_request.path, transformed_request, start_response)
if discovery_response:
return discovery_response
url = transformed_request.base_path + transformed_request.path
transformed_request.headers['Content-Type'] = 'application/json'
transformed_environ = self.prepare_backend_environ(
orig_request.server, 'POST', url, transformed_request.headers.items(),
transformed_request.body, transformed_request.source_ip,
orig_request.port)
# Send the transformed request to the backend app and capture the response.
with util.StartResponseProxy() as start_response_proxy:
body_iter = self._backend(transformed_environ, start_response_proxy.Proxy)
status = start_response_proxy.response_status
headers = start_response_proxy.response_headers
# Get response body
body = start_response_proxy.response_body
# In case standard WSGI behavior is implemented later...
if not body:
body = ''.join(body_iter)
return self.handle_backend_response(orig_request, transformed_request,
status, headers, body, method_config,
start_response) | [
"Generate API call (from earlier-saved request).\n\n This calls start_response and returns the response body.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string containing the response body.\n "
] |
Please provide a description of the function:def handle_backend_response(self, orig_request, backend_request,
response_status, response_headers,
response_body, method_config, start_response):
# Verify that the response is json. If it isn't treat, the body as an
# error message and wrap it in a json error response.
for header, value in response_headers:
if (header.lower() == 'content-type' and
not value.lower().startswith('application/json')):
return self.fail_request(orig_request,
'Non-JSON reply: %s' % response_body,
start_response)
self.check_error_response(response_body, response_status)
# Check if the response from the API was empty. Empty REST responses
# generate a HTTP 204.
empty_response = self.check_empty_response(orig_request, method_config,
start_response)
if empty_response is not None:
return empty_response
body = self.transform_rest_response(response_body)
cors_handler = self._create_cors_handler(orig_request)
return util.send_wsgi_response(response_status, response_headers, body,
start_response, cors_handler=cors_handler) | [
"Handle backend response, transforming output as needed.\n\n This calls start_response and returns the response body.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n backend_request: An ApiRequest, the transformed request that was\n sent to the backend handler.\n response_status: A string, the status from the response.\n response_headers: A dict, the headers from the response.\n response_body: A string, the body of the response.\n method_config: A dict, the API config of the method to be called.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string containing the response body.\n "
] |
Please provide a description of the function:def fail_request(self, orig_request, message, start_response):
cors_handler = self._create_cors_handler(orig_request)
return util.send_wsgi_error_response(
message, start_response, cors_handler=cors_handler) | [
"Write an immediate failure response to outfile, no redirect.\n\n This calls start_response and returns the error body.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n message: A string containing the error message to be displayed to user.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string containing the body of the error response.\n "
] |
Please provide a description of the function:def lookup_rest_method(self, orig_request):
method_name, method, params = self.config_manager.lookup_rest_method(
orig_request.path, orig_request.request_uri, orig_request.http_method)
orig_request.method_name = method_name
return method, params | [
"Looks up and returns rest method for the currently-pending request.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n\n Returns:\n A tuple of (method descriptor, parameters), or (None, None) if no method\n was found for the current request.\n "
] |
Please provide a description of the function:def transform_request(self, orig_request, params, method_config):
method_params = method_config.get('request', {}).get('parameters', {})
request = self.transform_rest_request(orig_request, params, method_params)
request.path = method_config.get('rosyMethod', '')
return request | [
"Transforms orig_request to apiserving request.\n\n This method uses orig_request to determine the currently-pending request\n and returns a new transformed request ready to send to the backend. This\n method accepts a rest-style or RPC-style request.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n params: A dictionary containing path parameters for rest requests, or\n None for an RPC request.\n method_config: A dict, the API config of the method to be called.\n\n Returns:\n An ApiRequest that's a copy of the current request, modified so it can\n be sent to the backend. The path is updated and parts of the body or\n other properties may also be changed.\n "
] |
Please provide a description of the function:def _add_message_field(self, field_name, value, params):
if '.' not in field_name:
params[field_name] = value
return
root, remaining = field_name.split('.', 1)
sub_params = params.setdefault(root, {})
self._add_message_field(remaining, value, sub_params) | [
"Converts a . delimitied field name to a message field in parameters.\n\n This adds the field to the params dict, broken out so that message\n parameters appear as sub-dicts within the outer param.\n\n For example:\n {'a.b.c': ['foo']}\n becomes:\n {'a': {'b': {'c': ['foo']}}}\n\n Args:\n field_name: A string containing the '.' delimitied name to be converted\n into a dictionary.\n value: The value to be set.\n params: The dictionary holding all the parameters, where the value is\n eventually set.\n "
] |
Please provide a description of the function:def _update_from_body(self, destination, source):
for key, value in source.iteritems():
destination_value = destination.get(key)
if isinstance(value, dict) and isinstance(destination_value, dict):
self._update_from_body(destination_value, value)
else:
destination[key] = value | [
"Updates the dictionary for an API payload with the request body.\n\n The values from the body should override those already in the payload, but\n for nested fields (message objects) the values can be combined\n recursively.\n\n Args:\n destination: A dictionary containing an API payload parsed from the\n path and query parameters in a request.\n source: A dictionary parsed from the body of the request.\n "
] |
Please provide a description of the function:def transform_rest_request(self, orig_request, params, method_parameters):
request = orig_request.copy()
body_json = {}
# Handle parameters from the URL path.
for key, value in params.iteritems():
# Values need to be in a list to interact with query parameter values
# and to account for case of repeated parameters
body_json[key] = [value]
# Add in parameters from the query string.
if request.parameters:
# For repeated elements, query and path work together
for key, value in request.parameters.iteritems():
if key in body_json:
body_json[key] = value + body_json[key]
else:
body_json[key] = value
# Validate all parameters we've merged so far and convert any '.' delimited
# parameters to nested parameters. We don't use iteritems since we may
# modify body_json within the loop. For instance, 'a.b' is not a valid key
# and would be replaced with 'a'.
for key, value in body_json.items():
current_parameter = method_parameters.get(key, {})
repeated = current_parameter.get('repeated', False)
if not repeated:
body_json[key] = body_json[key][0]
# Order is important here. Parameter names are dot-delimited in
# parameters instead of nested in dictionaries as a message field is, so
# we need to call transform_parameter_value on them before calling
# _add_message_field.
body_json[key] = parameter_converter.transform_parameter_value(
key, body_json[key], current_parameter)
# Remove the old key and try to convert to nested message value
message_value = body_json.pop(key)
self._add_message_field(key, message_value, body_json)
# Add in values from the body of the request.
if request.body_json:
self._update_from_body(body_json, request.body_json)
request.body_json = body_json
request.body = json.dumps(request.body_json)
return request | [
"Translates a Rest request into an apiserving request.\n\n This makes a copy of orig_request and transforms it to apiserving\n format (moving request parameters to the body).\n\n The request can receive values from the path, query and body and combine\n them before sending them along to the backend. In cases of collision,\n objects from the body take precedence over those from the query, which in\n turn take precedence over those from the path.\n\n In the case that a repeated value occurs in both the query and the path,\n those values can be combined, but if that value also occurred in the body,\n it would override any other values.\n\n In the case of nested values from message fields, non-colliding values\n from subfields can be combined. For example, if '?a.c=10' occurs in the\n query string and \"{'a': {'b': 11}}\" occurs in the body, then they will be\n combined as\n\n {\n 'a': {\n 'b': 11,\n 'c': 10,\n }\n }\n\n before being sent to the backend.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n params: A dict with URL path parameters extracted by the config_manager\n lookup.\n method_parameters: A dictionary containing the API configuration for the\n parameters for the request.\n\n Returns:\n A copy of the current request that's been modified so it can be sent\n to the backend. The body is updated to include parameters from the\n URL.\n "
] |
Please provide a description of the function:def check_error_response(self, body, status):
status_code = int(status.split(' ', 1)[0])
if status_code >= 300:
raise errors.BackendError(body, status) | [
"Raise an exception if the response from the backend was an error.\n\n Args:\n body: A string containing the backend response body.\n status: A string containing the backend response status.\n\n Raises:\n BackendError if the response is an error.\n "
] |
Please provide a description of the function:def check_empty_response(self, orig_request, method_config, start_response):
response_config = method_config.get('response', {}).get('body')
if response_config == 'empty':
# The response to this function should be empty. We should return a 204.
# Note that it's possible that the backend returned something, but we'll
# ignore it. This matches the behavior in the Endpoints server.
cors_handler = self._create_cors_handler(orig_request)
return util.send_wsgi_no_content_response(start_response, cors_handler) | [
"If the response from the backend is empty, return a HTTP 204 No Content.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n method_config: A dict, the API config of the method to be called.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n If the backend response was empty, this returns a string containing the\n response body that should be returned to the user. If the backend\n response wasn't empty, this returns None, indicating that we should not\n exit early with a 204.\n "
] |
Please provide a description of the function:def transform_rest_response(self, response_body):
body_json = json.loads(response_body)
return json.dumps(body_json, indent=1, sort_keys=True) | [
"Translates an apiserving REST response so it's ready to return.\n\n Currently, the only thing that needs to be fixed here is indentation,\n so it's consistent with what the live app will return.\n\n Args:\n response_body: A string containing the backend response.\n\n Returns:\n A reformatted version of the response JSON.\n "
] |
Please provide a description of the function:def _handle_request_error(self, orig_request, error, start_response):
headers = [('Content-Type', 'application/json')]
status_code = error.status_code()
body = error.rest_error()
response_status = '%d %s' % (status_code,
httplib.responses.get(status_code,
'Unknown Error'))
cors_handler = self._create_cors_handler(orig_request)
return util.send_wsgi_response(response_status, headers, body,
start_response, cors_handler=cors_handler) | [
"Handle a request error, converting it to a WSGI response.\n\n Args:\n orig_request: An ApiRequest, the original request from the user.\n error: A RequestError containing information about the error.\n start_response: A function with semantics defined in PEP-333.\n\n Returns:\n A string containing the response body.\n "
] |
Please provide a description of the function:def _WriteFile(output_path, name, content):
path = os.path.join(output_path, name)
with open(path, 'wb') as f:
f.write(content)
return path | [
"Write given content to a file in a given directory.\n\n Args:\n output_path: The directory to store the file in.\n name: The name of the file to store the content in.\n content: The content to write to the file.close\n\n Returns:\n The full path to the written file.\n "
] |
Please provide a description of the function:def GenApiConfig(service_class_names, config_string_generator=None,
hostname=None, application_path=None, **additional_kwargs):
# First, gather together all the different APIs implemented by these
# classes. There may be fewer APIs than service classes. Each API is
# uniquely identified by (name, version). Order needs to be preserved here,
# so APIs that were listed first are returned first.
api_service_map = collections.OrderedDict()
resolved_services = []
for service_class_name in service_class_names:
module_name, base_service_class_name = service_class_name.rsplit('.', 1)
module = __import__(module_name, fromlist=base_service_class_name)
service = getattr(module, base_service_class_name)
if hasattr(service, 'get_api_classes'):
resolved_services.extend(service.get_api_classes())
elif (not isinstance(service, type) or
not issubclass(service, remote.Service)):
raise TypeError('%s is not a ProtoRPC service' % service_class_name)
else:
resolved_services.append(service)
for resolved_service in resolved_services:
services = api_service_map.setdefault(
(resolved_service.api_info.name, resolved_service.api_info.api_version), [])
services.append(resolved_service)
# If hostname isn't specified in the API or on the command line, we'll
# try to build it from information in app.yaml.
app_yaml_hostname = _GetAppYamlHostname(application_path)
service_map = collections.OrderedDict()
config_string_generator = (
config_string_generator or api_config.ApiConfigGenerator())
for api_info, services in api_service_map.iteritems():
assert services, 'An API must have at least one ProtoRPC service'
# Only override hostname if None. Hostname will be the same for all
# services within an API, since it's stored in common info.
hostname = services[0].api_info.hostname or hostname or app_yaml_hostname
# Map each API by name-version.
service_map['%s-%s' % api_info] = (
config_string_generator.pretty_print_config_to_json(
services, hostname=hostname, **additional_kwargs))
return service_map | [
"Write an API configuration for endpoints annotated ProtoRPC services.\n\n Args:\n service_class_names: A list of fully qualified ProtoRPC service classes.\n config_string_generator: A generator object that produces API config strings\n using its pretty_print_config_to_json method.\n hostname: A string hostname which will be used as the default version\n hostname. If no hostname is specificied in the @endpoints.api decorator,\n this value is the fallback.\n application_path: A string with the path to the AppEngine application.\n\n Raises:\n TypeError: If any service classes don't inherit from remote.Service.\n messages.DefinitionNotFoundError: If a service can't be found.\n\n Returns:\n A map from service names to a string containing the API configuration of the\n service in JSON format.\n "
] |
Please provide a description of the function:def _GetAppYamlHostname(application_path, open_func=open):
try:
app_yaml_file = open_func(os.path.join(application_path or '.', 'app.yaml'))
config = yaml.safe_load(app_yaml_file.read())
except IOError:
# Couldn't open/read app.yaml.
return None
application = config.get('application')
if not application:
return None
if ':' in application:
# Don't try to deal with alternate domains.
return None
# If there's a prefix ending in a '~', strip it.
tilde_index = application.rfind('~')
if tilde_index >= 0:
application = application[tilde_index + 1:]
if not application:
return None
return '%s.appspot.com' % application | [
"Build the hostname for this app based on the name in app.yaml.\n\n Args:\n application_path: A string with the path to the AppEngine application. This\n should be the directory containing the app.yaml file.\n open_func: Function to call to open a file. Used to override the default\n open function in unit tests.\n\n Returns:\n A hostname, usually in the form of \"myapp.appspot.com\", based on the\n application name in the app.yaml file. If the file can't be found or\n there's a problem building the name, this will return None.\n "
] |
Please provide a description of the function:def _GenDiscoveryDoc(service_class_names,
output_path, hostname=None,
application_path=None):
output_files = []
service_configs = GenApiConfig(
service_class_names, hostname=hostname,
config_string_generator=discovery_generator.DiscoveryGenerator(),
application_path=application_path)
for api_name_version, config in service_configs.iteritems():
discovery_name = api_name_version + '.discovery'
output_files.append(_WriteFile(output_path, discovery_name, config))
return output_files | [
"Write discovery documents generated from the service classes to file.\n\n Args:\n service_class_names: A list of fully qualified ProtoRPC service names.\n output_path: The directory to output the discovery docs to.\n hostname: A string hostname which will be used as the default version\n hostname. If no hostname is specificied in the @endpoints.api decorator,\n this value is the fallback. Defaults to None.\n application_path: A string containing the path to the AppEngine app.\n\n Returns:\n A list of discovery doc filenames.\n "
] |
Please provide a description of the function:def _GenOpenApiSpec(service_class_names, output_path, hostname=None,
application_path=None, x_google_api_name=False):
output_files = []
service_configs = GenApiConfig(
service_class_names, hostname=hostname,
config_string_generator=openapi_generator.OpenApiGenerator(),
application_path=application_path,
x_google_api_name=x_google_api_name)
for api_name_version, config in service_configs.iteritems():
openapi_name = api_name_version.replace('-', '') + 'openapi.json'
output_files.append(_WriteFile(output_path, openapi_name, config))
return output_files | [
"Write openapi documents generated from the service classes to file.\n\n Args:\n service_class_names: A list of fully qualified ProtoRPC service names.\n output_path: The directory to which to output the OpenAPI specs.\n hostname: A string hostname which will be used as the default version\n hostname. If no hostname is specified in the @endpoints.api decorator,\n this value is the fallback. Defaults to None.\n application_path: A string containing the path to the AppEngine app.\n\n Returns:\n A list of OpenAPI spec filenames.\n "
] |
Please provide a description of the function:def _GenClientLib(discovery_path, language, output_path, build_system):
with open(discovery_path) as f:
discovery_doc = f.read()
client_name = re.sub(r'\.discovery$', '.zip',
os.path.basename(discovery_path))
return _GenClientLibFromContents(discovery_doc, language, output_path,
build_system, client_name) | [
"Write a client library from a discovery doc.\n\n Args:\n discovery_path: Path to the discovery doc used to generate the client\n library.\n language: The client library language to generate. (java)\n output_path: The directory to output the client library zip to.\n build_system: The target build system for the client library language.\n\n Raises:\n IOError: If reading the discovery doc fails.\n ServerRequestException: If fetching the generated client library fails.\n\n Returns:\n The path to the zipped client library.\n "
] |
Please provide a description of the function:def _GenClientLibFromContents(discovery_doc, language, output_path,
build_system, client_name):
body = urllib.urlencode({'lang': language, 'content': discovery_doc,
'layout': build_system})
request = urllib2.Request(CLIENT_LIBRARY_BASE, body)
try:
with contextlib.closing(urllib2.urlopen(request)) as response:
content = response.read()
return _WriteFile(output_path, client_name, content)
except urllib2.HTTPError, error:
raise ServerRequestException(error) | [
"Write a client library from a discovery doc.\n\n Args:\n discovery_doc: A string, the contents of the discovery doc used to\n generate the client library.\n language: A string, the client library language to generate. (java)\n output_path: A string, the directory to output the client library zip to.\n build_system: A string, the target build system for the client language.\n client_name: A string, the filename used to save the client lib.\n\n Raises:\n IOError: If reading the discovery doc fails.\n ServerRequestException: If fetching the generated client library fails.\n\n Returns:\n The path to the zipped client library.\n "
] |
Please provide a description of the function:def _GetClientLib(service_class_names, language, output_path, build_system,
hostname=None, application_path=None):
client_libs = []
service_configs = GenApiConfig(
service_class_names, hostname=hostname,
config_string_generator=discovery_generator.DiscoveryGenerator(),
application_path=application_path)
for api_name_version, config in service_configs.iteritems():
client_name = api_name_version + '.zip'
client_libs.append(
_GenClientLibFromContents(config, language, output_path,
build_system, client_name))
return client_libs | [
"Fetch client libraries from a cloud service.\n\n Args:\n service_class_names: A list of fully qualified ProtoRPC service names.\n language: The client library language to generate. (java)\n output_path: The directory to output the discovery docs to.\n build_system: The target build system for the client library language.\n hostname: A string hostname which will be used as the default version\n hostname. If no hostname is specificied in the @endpoints.api decorator,\n this value is the fallback. Defaults to None.\n application_path: A string containing the path to the AppEngine app.\n\n Returns:\n A list of paths to client libraries.\n "
] |
Please provide a description of the function:def _GenApiConfigCallback(args, api_func=GenApiConfig):
service_configs = api_func(args.service,
hostname=args.hostname,
application_path=args.application)
for api_name_version, config in service_configs.iteritems():
_WriteFile(args.output, api_name_version + '.api', config) | [
"Generate an api file.\n\n Args:\n args: An argparse.Namespace object to extract parameters from.\n api_func: A function that generates and returns an API configuration\n for a list of services.\n "
] |
Please provide a description of the function:def _GetClientLibCallback(args, client_func=_GetClientLib):
client_paths = client_func(
args.service, args.language, args.output, args.build_system,
hostname=args.hostname, application_path=args.application)
for client_path in client_paths:
print 'API client library written to %s' % client_path | [
"Generate discovery docs and client libraries to files.\n\n Args:\n args: An argparse.Namespace object to extract parameters from.\n client_func: A function that generates client libraries and stores them to\n files, accepting a list of service names, a client library language,\n an output directory, a build system for the client library language, and\n a hostname.\n "
] |
Please provide a description of the function:def _GenDiscoveryDocCallback(args, discovery_func=_GenDiscoveryDoc):
discovery_paths = discovery_func(args.service, args.output,
hostname=args.hostname,
application_path=args.application)
for discovery_path in discovery_paths:
print 'API discovery document written to %s' % discovery_path | [
"Generate discovery docs to files.\n\n Args:\n args: An argparse.Namespace object to extract parameters from\n discovery_func: A function that generates discovery docs and stores them to\n files, accepting a list of service names, a discovery doc format, and an\n output directory.\n "
] |
Please provide a description of the function:def _GenOpenApiSpecCallback(args, openapi_func=_GenOpenApiSpec):
openapi_paths = openapi_func(args.service, args.output,
hostname=args.hostname,
application_path=args.application,
x_google_api_name=args.x_google_api_name)
for openapi_path in openapi_paths:
print 'OpenAPI spec written to %s' % openapi_path | [
"Generate OpenAPI (Swagger) specs to files.\n\n Args:\n args: An argparse.Namespace object to extract parameters from\n openapi_func: A function that generates OpenAPI specs and stores them to\n files, accepting a list of service names and an output directory.\n "
] |
Please provide a description of the function:def _GenClientLibCallback(args, client_func=_GenClientLib):
client_path = client_func(args.discovery_doc[0], args.language, args.output,
args.build_system)
print 'API client library written to %s' % client_path | [
"Generate a client library to file.\n\n Args:\n args: An argparse.Namespace object to extract parameters from\n client_func: A function that generates client libraries and stores them to\n files, accepting a path to a discovery doc, a client library language, an\n output directory, and a build system for the client library language.\n "
] |
Please provide a description of the function:def MakeParser(prog):
def AddStandardOptions(parser, *args):
if 'application' in args:
parser.add_argument('-a', '--application', default='.',
help='The path to the Python App Engine App')
if 'format' in args:
# This used to be a valid option, allowing the user to select 'rest' or 'rpc',
# but now 'rest' is the only valid type. The argument remains so scripts using it
# won't break.
parser.add_argument('-f', '--format', default='rest',
choices=['rest'],
help='The requested API protocol type (ignored)')
if 'hostname' in args:
help_text = ('Default application hostname, if none is specified '
'for API service.')
parser.add_argument('--hostname', help=help_text)
if 'output' in args:
parser.add_argument('-o', '--output', default='.',
help='The directory to store output files')
if 'language' in args:
parser.add_argument('language',
help='The target output programming language')
if 'service' in args:
parser.add_argument('service', nargs='+',
help='Fully qualified service class name')
if 'discovery_doc' in args:
parser.add_argument('discovery_doc', nargs=1,
help='Path to the discovery document')
if 'build_system' in args:
parser.add_argument('-bs', '--build_system', default='default',
help='The target build system')
parser = _EndpointsParser(prog=prog)
subparsers = parser.add_subparsers(
title='subcommands', metavar='{%s}' % ', '.join(_VISIBLE_COMMANDS))
get_client_lib = subparsers.add_parser(
'get_client_lib', help=('Generates discovery documents and client '
'libraries from service classes'))
get_client_lib.set_defaults(callback=_GetClientLibCallback)
AddStandardOptions(get_client_lib, 'application', 'hostname', 'output',
'language', 'service', 'build_system')
get_discovery_doc = subparsers.add_parser(
'get_discovery_doc',
help='Generates discovery documents from service classes')
get_discovery_doc.set_defaults(callback=_GenDiscoveryDocCallback)
AddStandardOptions(get_discovery_doc, 'application', 'format', 'hostname',
'output', 'service')
get_openapi_spec = subparsers.add_parser(
'get_openapi_spec',
help='Generates OpenAPI (Swagger) specs from service classes')
get_openapi_spec.set_defaults(callback=_GenOpenApiSpecCallback)
AddStandardOptions(get_openapi_spec, 'application', 'hostname', 'output',
'service')
get_openapi_spec.add_argument('--x-google-api-name', action='store_true',
help="Add the 'x-google-api-name' field to the generated spec")
# Create an alias for get_openapi_spec called get_swagger_spec to support
# the old-style naming. This won't be a visible command, but it will still
# function to support legacy scripts.
get_swagger_spec = subparsers.add_parser(
'get_swagger_spec',
help='Generates OpenAPI (Swagger) specs from service classes')
get_swagger_spec.set_defaults(callback=_GenOpenApiSpecCallback)
AddStandardOptions(get_swagger_spec, 'application', 'hostname', 'output',
'service')
# By removing the help attribute, the following three actions won't be
# displayed in usage message
gen_api_config = subparsers.add_parser('gen_api_config')
gen_api_config.set_defaults(callback=_GenApiConfigCallback)
AddStandardOptions(gen_api_config, 'application', 'hostname', 'output',
'service')
gen_discovery_doc = subparsers.add_parser('gen_discovery_doc')
gen_discovery_doc.set_defaults(callback=_GenDiscoveryDocCallback)
AddStandardOptions(gen_discovery_doc, 'application', 'format', 'hostname',
'output', 'service')
gen_client_lib = subparsers.add_parser('gen_client_lib')
gen_client_lib.set_defaults(callback=_GenClientLibCallback)
AddStandardOptions(gen_client_lib, 'output', 'language', 'discovery_doc',
'build_system')
return parser | [
"Create an argument parser.\n\n Args:\n prog: The name of the program to use when outputting help text.\n\n Returns:\n An argparse.ArgumentParser built to specification.\n ",
"Add common endpoints options to a parser.\n\n Args:\n parser: The parser to add options to.\n *args: A list of option names to add. Possible names are: application,\n format, output, language, service, and discovery_doc.\n "
] |
Please provide a description of the function:def error(self, message):
# subcommands_quoted is the same as subcommands, except each value is
# surrounded with double quotes. This is done to match the standard
# output of the ArgumentParser, while hiding commands we don't want users
# to use, as they are no longer documented and only here for legacy use.
subcommands_quoted = ', '.join(
[repr(command) for command in _VISIBLE_COMMANDS])
subcommands = ', '.join(_VISIBLE_COMMANDS)
message = re.sub(
r'(argument {%s}: invalid choice: .*) \(choose from (.*)\)$'
% subcommands, r'\1 (choose from %s)' % subcommands_quoted, message)
super(_EndpointsParser, self).error(message) | [
"Override superclass to support customized error message.\n\n Error message needs to be rewritten in order to display visible commands\n only, when invalid command is called by user. Otherwise, hidden commands\n will be displayed in stderr, which is not expected.\n\n Refer the following argparse python documentation for detailed method\n information:\n http://docs.python.org/2/library/argparse.html#exiting-methods\n\n Args:\n message: original error message that will be printed to stderr\n "
] |
Please provide a description of the function:def _SetupPaths():
sdk_path = _FindSdkPath()
if sdk_path:
sys.path.append(sdk_path)
try:
import dev_appserver # pylint: disable=g-import-not-at-top
if hasattr(dev_appserver, 'fix_sys_path'):
dev_appserver.fix_sys_path()
else:
logging.warning(_NO_FIX_SYS_PATH_WARNING)
except ImportError:
logging.warning(_IMPORT_ERROR_WARNING)
else:
logging.warning(_NOT_FOUND_WARNING)
# Add the path above this directory, so we can import the endpoints package
# from the user's app code (rather than from another, possibly outdated SDK).
# pylint: disable=g-import-not-at-top
from google.appengine.ext import vendor
vendor.add(os.path.dirname(os.path.dirname(__file__))) | [
"Sets up the sys.path with special directories for endpointscfg.py."
] |
Please provide a description of the function:def _Enum(docstring, *names):
enums = dict(zip(names, range(len(names))))
reverse = dict((value, key) for key, value in enums.iteritems())
enums['reverse_mapping'] = reverse
enums['__doc__'] = docstring
return type('Enum', (object,), enums) | [
"Utility to generate enum classes used by annotations.\n\n Args:\n docstring: Docstring for the generated enum class.\n *names: Enum names.\n\n Returns:\n A class that contains enum names as attributes.\n "
] |
Please provide a description of the function:def _CheckType(value, check_type, name, allow_none=True):
if value is None and allow_none:
return
if not isinstance(value, check_type):
raise TypeError('%s type doesn\'t match %s.' % (name, check_type)) | [
"Check that the type of an object is acceptable.\n\n Args:\n value: The object whose type is to be checked.\n check_type: The type that the object must be an instance of.\n name: Name of the object, to be placed in any error messages.\n allow_none: True if value can be None, false if not.\n\n Raises:\n TypeError: If value is not an acceptable type.\n "
] |
Please provide a description of the function:def api(name, version, description=None, hostname=None, audiences=None,
scopes=None, allowed_client_ids=None, canonical_name=None,
auth=None, owner_domain=None, owner_name=None, package_path=None,
frontend_limits=None, title=None, documentation=None, auth_level=None,
issuers=None, namespace=None, api_key_required=None, base_path=None,
limit_definitions=None, use_request_uri=None):
if auth_level is not None:
_logger.warn(_AUTH_LEVEL_WARNING)
return _ApiDecorator(name, version, description=description,
hostname=hostname, audiences=audiences, scopes=scopes,
allowed_client_ids=allowed_client_ids,
canonical_name=canonical_name, auth=auth,
owner_domain=owner_domain, owner_name=owner_name,
package_path=package_path,
frontend_limits=frontend_limits, title=title,
documentation=documentation, auth_level=auth_level,
issuers=issuers, namespace=namespace,
api_key_required=api_key_required, base_path=base_path,
limit_definitions=limit_definitions,
use_request_uri=use_request_uri) | [
"Decorate a ProtoRPC Service class for use by the framework above.\n\n This decorator can be used to specify an API name, version, description, and\n hostname for your API.\n\n Sample usage (python 2.7):\n @endpoints.api(name='guestbook', version='v0.2',\n description='Guestbook API')\n class PostService(remote.Service):\n ...\n\n Sample usage (python 2.5):\n class PostService(remote.Service):\n ...\n endpoints.api(name='guestbook', version='v0.2',\n description='Guestbook API')(PostService)\n\n Sample usage if multiple classes implement one API:\n api_root = endpoints.api(name='library', version='v1.0')\n\n @api_root.api_class(resource_name='shelves')\n class Shelves(remote.Service):\n ...\n\n @api_root.api_class(resource_name='books', path='books')\n class Books(remote.Service):\n ...\n\n Args:\n name: string, Name of the API.\n version: string, Version of the API.\n description: string, Short description of the API (Default: None)\n hostname: string, Hostname of the API (Default: app engine default host)\n audiences: list of strings, Acceptable audiences for authentication.\n scopes: list of strings, Acceptable scopes for authentication.\n allowed_client_ids: list of strings, Acceptable client IDs for auth.\n canonical_name: string, the canonical name for the API, a more human\n readable version of the name.\n auth: ApiAuth instance, the authentication configuration information\n for this API.\n owner_domain: string, the domain of the person or company that owns\n this API. Along with owner_name, this provides hints to properly\n name client libraries for this API.\n owner_name: string, the name of the owner of this API. Along with\n owner_domain, this provides hints to properly name client libraries\n for this API.\n package_path: string, the \"package\" this API belongs to. This '/'\n delimited value specifies logical groupings of APIs. This is used by\n client libraries of this API.\n frontend_limits: ApiFrontEndLimits, optional query limits for unregistered\n developers.\n title: string, the human readable title of your API. It is exposed in the\n discovery service.\n documentation: string, a URL where users can find documentation about this\n version of the API. This will be surfaced in the API Explorer and GPE\n plugin to allow users to learn about your service.\n auth_level: enum from AUTH_LEVEL, frontend authentication level.\n issuers: dict, mapping auth issuer names to endpoints.Issuer objects.\n namespace: endpoints.Namespace, the namespace for the API.\n api_key_required: bool, whether a key is required to call into this API.\n base_path: string, the base path for all endpoints in this API.\n limit_definitions: list of endpoints.LimitDefinition objects, quota metric\n definitions for this API.\n use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO\n\n\n Returns:\n Class decorated with api_info attribute, an instance of ApiInfo.\n "
] |
Please provide a description of the function:def method(request_message=message_types.VoidMessage,
response_message=message_types.VoidMessage,
name=None,
path=None,
http_method='POST',
scopes=None,
audiences=None,
allowed_client_ids=None,
auth_level=None,
api_key_required=None,
metric_costs=None,
use_request_uri=None):
if auth_level is not None:
_logger.warn(_AUTH_LEVEL_WARNING)
# Default HTTP method if one is not specified.
DEFAULT_HTTP_METHOD = 'POST'
def apiserving_method_decorator(api_method):
request_body_class = None
request_params_class = None
if isinstance(request_message, resource_container.ResourceContainer):
remote_decorator = remote.method(request_message.combined_message_class,
response_message)
request_body_class = request_message.body_message_class()
request_params_class = request_message.parameters_message_class()
else:
remote_decorator = remote.method(request_message, response_message)
remote_method = remote_decorator(api_method)
def invoke_remote(service_instance, request):
# If the server didn't specify any auth information, build it now.
# pylint: disable=protected-access
users_id_token._maybe_set_current_user_vars(
invoke_remote, api_info=getattr(service_instance, 'api_info', None),
request=request)
# pylint: enable=protected-access
return remote_method(service_instance, request)
invoke_remote.remote = remote_method.remote
if isinstance(request_message, resource_container.ResourceContainer):
resource_container.ResourceContainer.add_to_cache(
invoke_remote.remote, request_message)
invoke_remote.method_info = _MethodInfo(
name=name or api_method.__name__, path=path or api_method.__name__,
http_method=http_method or DEFAULT_HTTP_METHOD,
scopes=scopes, audiences=audiences,
allowed_client_ids=allowed_client_ids, auth_level=auth_level,
api_key_required=api_key_required, metric_costs=metric_costs,
use_request_uri=use_request_uri,
request_body_class=request_body_class,
request_params_class=request_params_class)
invoke_remote.__name__ = invoke_remote.method_info.name
return invoke_remote
endpoints_util.check_list_type(scopes, (basestring, endpoints_types.OAuth2Scope), 'scopes')
endpoints_util.check_list_type(allowed_client_ids, basestring,
'allowed_client_ids')
_CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')
_CheckAudiences(audiences)
_CheckType(metric_costs, dict, 'metric_costs')
return apiserving_method_decorator | [
"Decorate a ProtoRPC Method for use by the framework above.\n\n This decorator can be used to specify a method name, path, http method,\n scopes, audiences, client ids and auth_level.\n\n Sample usage:\n @api_config.method(RequestMessage, ResponseMessage,\n name='insert', http_method='PUT')\n def greeting_insert(request):\n ...\n return response\n\n Args:\n request_message: Message type of expected request.\n response_message: Message type of expected response.\n name: string, Name of the method, prepended with <apiname>. to make it\n unique. (Default: python method name)\n path: string, Path portion of the URL to the method, for RESTful methods.\n http_method: string, HTTP method supported by the method. (Default: POST)\n scopes: list of string, OAuth2 token must contain one of these scopes.\n audiences: list of string, IdToken must contain one of these audiences.\n allowed_client_ids: list of string, Client IDs allowed to call the method.\n If None and auth_level is REQUIRED, no calls will be allowed.\n auth_level: enum from AUTH_LEVEL, Frontend auth level for the method.\n api_key_required: bool, whether a key is required to call the method\n metric_costs: dict with keys matching an API limit metric and values\n representing the cost for each successful call against that metric.\n use_request_uri: if true, match requests against REQUEST_URI instead of PATH_INFO\n\n Returns:\n 'apiserving_method_wrapper' function.\n\n Raises:\n TypeError: if the request_type or response_type parameters are not\n proper subclasses of messages.Message.\n ",
"Decorator for ProtoRPC method that configures Google's API server.\n\n Args:\n api_method: Original method being wrapped.\n\n Returns:\n Function responsible for actual invocation.\n Assigns the following attributes to invocation function:\n remote: Instance of RemoteInfo, contains remote method information.\n remote.request_type: Expected request type for remote method.\n remote.response_type: Response type returned from remote method.\n method_info: Instance of _MethodInfo, api method configuration.\n It is also assigned attributes corresponding to the aforementioned kwargs.\n\n Raises:\n TypeError: if the request_type or response_type parameters are not\n proper subclasses of messages.Message.\n KeyError: if the request_message is a ResourceContainer and the newly\n created remote method has been reference by the container before. This\n should never occur because a remote method is created once.\n "
] |
Please provide a description of the function:def is_same_api(self, other):
if not isinstance(other, _ApiInfo):
return False
# pylint: disable=protected-access
return self.__common_info is other.__common_info | [
"Check if this implements the same API as another _ApiInfo instance."
] |
Please provide a description of the function:def api_class(self, resource_name=None, path=None, audiences=None,
scopes=None, allowed_client_ids=None, auth_level=None,
api_key_required=None):
if auth_level is not None:
_logger.warn(_AUTH_LEVEL_WARNING)
def apiserving_api_decorator(api_class):
self.__classes.append(api_class)
api_class.api_info = _ApiInfo(
self.__common_info, resource_name=resource_name,
path=path, audiences=audiences, scopes=scopes,
allowed_client_ids=allowed_client_ids, auth_level=auth_level,
api_key_required=api_key_required)
return api_class
return apiserving_api_decorator | [
"Get a decorator for a class that implements an API.\n\n This can be used for single-class or multi-class implementations. It's\n used implicitly in simple single-class APIs that only use @api directly.\n\n Args:\n resource_name: string, Resource name for the class this decorates.\n (Default: None)\n path: string, Base path prepended to any method paths in the class this\n decorates. (Default: None)\n audiences: list of strings, Acceptable audiences for authentication.\n (Default: None)\n scopes: list of strings, Acceptable scopes for authentication.\n (Default: None)\n allowed_client_ids: list of strings, Acceptable client IDs for auth.\n (Default: None)\n auth_level: enum from AUTH_LEVEL, Frontend authentication level.\n (Default: None)\n api_key_required: bool, Whether a key is required to call into this API.\n (Default: None)\n\n Returns:\n A decorator function to decorate a class that implements an API.\n ",
"Decorator for ProtoRPC class that configures Google's API server.\n\n Args:\n api_class: remote.Service class, ProtoRPC service class being wrapped.\n\n Returns:\n Same class with API attributes assigned in api_info.\n "
] |
Please provide a description of the function:def __safe_name(self, method_name):
# Endpoints backend restricts what chars are allowed in a method name.
safe_name = re.sub(r'[^\.a-zA-Z0-9_]', '', method_name)
# Strip any number of leading underscores.
safe_name = safe_name.lstrip('_')
# Ensure the first character is lowercase.
# Slice from 0:1 rather than indexing [0] in case safe_name is length 0.
return safe_name[0:1].lower() + safe_name[1:] | [
"Restrict method name to a-zA-Z0-9_, first char lowercase."
] |
Please provide a description of the function:def get_path(self, api_info):
path = self.__path or ''
if path and path[0] == '/':
# Absolute path, ignoring any prefixes. Just strip off the leading /.
path = path[1:]
else:
# Relative path.
if api_info.path:
path = '%s%s%s' % (api_info.path, '/' if path else '', path)
# Verify that the path seems valid.
parts = path.split('/')
for n, part in enumerate(parts):
r = _VALID_PART_RE if n < len(parts) - 1 else _VALID_LAST_PART_RE
if part and '{' in part and '}' in part:
if not r.match(part):
raise api_exceptions.ApiConfigurationError(
'Invalid path segment: %s (part of %s)' % (part, path))
return path | [
"Get the path portion of the URL to the method (for RESTful methods).\n\n Request path can be specified in the method, and it could have a base\n path prepended to it.\n\n Args:\n api_info: API information for this API, possibly including a base path.\n This is the api_info property on the class that's been annotated for\n this API.\n\n Returns:\n This method's request path (not including the http://.../{base_path}\n prefix).\n\n Raises:\n ApiConfigurationError: If the path isn't properly formatted.\n "
] |
Please provide a description of the function:def method_id(self, api_info):
# This is done here for now because at __init__ time, the method is known
# but not the api, and thus not the api name. Later, in
# ApiConfigGenerator.__method_descriptor, the api name is known.
if api_info.resource_name:
resource_part = '.%s' % self.__safe_name(api_info.resource_name)
else:
resource_part = ''
return '%s%s.%s' % (self.__safe_name(api_info.name), resource_part,
self.__safe_name(self.name)) | [
"Computed method name."
] |
Please provide a description of the function:def __field_to_subfields(self, field):
# Termination condition
if not isinstance(field, messages.MessageField):
return [[field]]
result = []
for subfield in sorted(field.message_type.all_fields(),
key=lambda f: f.number):
subfield_results = self.__field_to_subfields(subfield)
for subfields_list in subfield_results:
subfields_list.insert(0, field)
result.append(subfields_list)
return result | [
"Fully describes data represented by field, including the nested case.\n\n In the case that the field is not a message field, we have no fields nested\n within a message definition, so we can simply return that field. However, in\n the nested case, we can't simply describe the data with one field or even\n with one chain of fields.\n\n For example, if we have a message field\n\n m_field = messages.MessageField(RefClass, 1)\n\n which references a class with two fields:\n\n class RefClass(messages.Message):\n one = messages.StringField(1)\n two = messages.IntegerField(2)\n\n then we would need to include both one and two to represent all the\n data contained.\n\n Calling __field_to_subfields(m_field) would return:\n [\n [<MessageField \"m_field\">, <StringField \"one\">],\n [<MessageField \"m_field\">, <StringField \"two\">],\n ]\n\n If the second field was instead a message field\n\n class RefClass(messages.Message):\n one = messages.StringField(1)\n two = messages.MessageField(OtherRefClass, 2)\n\n referencing another class with two fields\n\n class OtherRefClass(messages.Message):\n three = messages.BooleanField(1)\n four = messages.FloatField(2)\n\n then we would need to recurse one level deeper for two.\n\n With this change, calling __field_to_subfields(m_field) would return:\n [\n [<MessageField \"m_field\">, <StringField \"one\">],\n [<MessageField \"m_field\">, <StringField \"two\">, <StringField \"three\">],\n [<MessageField \"m_field\">, <StringField \"two\">, <StringField \"four\">],\n ]\n\n Args:\n field: An instance of a subclass of messages.Field.\n\n Returns:\n A list of lists, where each sublist is a list of fields.\n "
] |
Please provide a description of the function:def __field_to_parameter_type(self, field):
# We use lowercase values for types (e.g. 'string' instead of 'STRING').
variant = field.variant
if variant == messages.Variant.MESSAGE:
raise TypeError('A message variant can\'t be used in a parameter.')
custom_variant_map = {
messages.Variant.SINT32: 'int32',
messages.Variant.SINT64: 'int64',
messages.Variant.BOOL: 'boolean',
messages.Variant.ENUM: 'string',
}
return custom_variant_map.get(variant) or variant.name.lower() | [
"Converts the field variant type into a string describing the parameter.\n\n Args:\n field: An instance of a subclass of messages.Field.\n\n Returns:\n A string corresponding to the variant enum of the field, with a few\n exceptions. In the case of signed ints, the 's' is dropped; for the BOOL\n variant, 'boolean' is used; and for the ENUM variant, 'string' is used.\n\n Raises:\n TypeError: if the field variant is a message variant.\n "
] |
Please provide a description of the function:def __get_path_parameters(self, path):
path_parameters_by_segment = {}
for format_var_name in re.findall(_PATH_VARIABLE_PATTERN, path):
first_segment = format_var_name.split('.', 1)[0]
matches = path_parameters_by_segment.setdefault(first_segment, [])
matches.append(format_var_name)
return path_parameters_by_segment | [
"Parses path paremeters from a URI path and organizes them by parameter.\n\n Some of the parameters may correspond to message fields, and so will be\n represented as segments corresponding to each subfield; e.g. first.second if\n the field \"second\" in the message field \"first\" is pulled from the path.\n\n The resulting dictionary uses the first segments as keys and each key has as\n value the list of full parameter values with first segment equal to the key.\n\n If the match path parameter is null, that part of the path template is\n ignored; this occurs if '{}' is used in a template.\n\n Args:\n path: String; a URI path, potentially with some parameters.\n\n Returns:\n A dictionary with strings as keys and list of strings as values.\n "
] |
Please provide a description of the function:def __validate_simple_subfield(self, parameter, field, segment_list,
_segment_index=0):
if _segment_index >= len(segment_list):
# In this case, the field is the final one, so should be simple type
if isinstance(field, messages.MessageField):
field_class = field.__class__.__name__
raise TypeError('Can\'t use messages in path. Subfield %r was '
'included but is a %s.' % (parameter, field_class))
return
segment = segment_list[_segment_index]
parameter += '.' + segment
try:
field = field.type.field_by_name(segment)
except (AttributeError, KeyError):
raise TypeError('Subfield %r from path does not exist.' % (parameter,))
self.__validate_simple_subfield(parameter, field, segment_list,
_segment_index=_segment_index + 1) | [
"Verifies that a proposed subfield actually exists and is a simple field.\n\n Here, simple means it is not a MessageField (nested).\n\n Args:\n parameter: String; the '.' delimited name of the current field being\n considered. This is relative to some root.\n field: An instance of a subclass of messages.Field. Corresponds to the\n previous segment in the path (previous relative to _segment_index),\n since this field should be a message field with the current segment\n as a field in the message class.\n segment_list: The full list of segments from the '.' delimited subfield\n being validated.\n _segment_index: Integer; used to hold the position of current segment so\n that segment_list can be passed as a reference instead of having to\n copy using segment_list[1:] at each step.\n\n Raises:\n TypeError: If the final subfield (indicated by _segment_index relative\n to the length of segment_list) is a MessageField.\n TypeError: If at any stage the lookup at a segment fails, e.g if a.b\n exists but a.b.c does not exist. This can happen either if a.b is not\n a message field or if a.b.c is not a property on the message class from\n a.b.\n "
] |
Please provide a description of the function:def __validate_path_parameters(self, field, path_parameters):
for param in path_parameters:
segment_list = param.split('.')
if segment_list[0] != field.name:
raise TypeError('Subfield %r can\'t come from field %r.'
% (param, field.name))
self.__validate_simple_subfield(field.name, field, segment_list[1:]) | [
"Verifies that all path parameters correspond to an existing subfield.\n\n Args:\n field: An instance of a subclass of messages.Field. Should be the root\n level property name in each path parameter in path_parameters. For\n example, if the field is called 'foo', then each path parameter should\n begin with 'foo.'.\n path_parameters: A list of Strings representing URI parameter variables.\n\n Raises:\n TypeError: If one of the path parameters does not start with field.name.\n "
] |
Please provide a description of the function:def __parameter_default(self, final_subfield):
if final_subfield.default:
if isinstance(final_subfield, messages.EnumField):
return final_subfield.default.name
else:
return final_subfield.default | [
"Returns default value of final subfield if it has one.\n\n If this subfield comes from a field list returned from __field_to_subfields,\n none of the fields in the subfield list can have a default except the final\n one since they all must be message fields.\n\n Args:\n final_subfield: A simple field from the end of a subfield list.\n\n Returns:\n The default value of the subfield, if any exists, with the exception of an\n enum field, which will have its value cast to a string.\n "
] |
Please provide a description of the function:def __parameter_enum(self, final_subfield):
if isinstance(final_subfield, messages.EnumField):
enum_descriptor = {}
for enum_value in final_subfield.type.to_dict().keys():
enum_descriptor[enum_value] = {'backendValue': enum_value}
return enum_descriptor | [
"Returns enum descriptor of final subfield if it is an enum.\n\n An enum descriptor is a dictionary with keys as the names from the enum and\n each value is a dictionary with a single key \"backendValue\" and value equal\n to the same enum name used to stored it in the descriptor.\n\n The key \"description\" can also be used next to \"backendValue\", but protorpc\n Enum classes have no way of supporting a description for each value.\n\n Args:\n final_subfield: A simple field from the end of a subfield list.\n\n Returns:\n The enum descriptor for the field, if it's an enum descriptor, else\n returns None.\n "
] |
Please provide a description of the function:def __parameter_descriptor(self, subfield_list):
descriptor = {}
final_subfield = subfield_list[-1]
# Required
if all(subfield.required for subfield in subfield_list):
descriptor['required'] = True
# Type
descriptor['type'] = self.__field_to_parameter_type(final_subfield)
# Default
default = self.__parameter_default(final_subfield)
if default is not None:
descriptor['default'] = default
# Repeated
if any(subfield.repeated for subfield in subfield_list):
descriptor['repeated'] = True
# Enum
enum_descriptor = self.__parameter_enum(final_subfield)
if enum_descriptor is not None:
descriptor['enum'] = enum_descriptor
return descriptor | [
"Creates descriptor for a parameter using the subfields that define it.\n\n Each parameter is defined by a list of fields, with all but the last being\n a message field and the final being a simple (non-message) field.\n\n Many of the fields in the descriptor are determined solely by the simple\n field at the end, though some (such as repeated and required) take the whole\n chain of fields into consideration.\n\n Args:\n subfield_list: List of fields describing the parameter.\n\n Returns:\n Dictionary containing a descriptor for the parameter described by the list\n of fields.\n "
] |
Please provide a description of the function:def __add_parameters_from_field(self, field, path_parameters,
params, param_order):
for subfield_list in self.__field_to_subfields(field):
descriptor = self.__parameter_descriptor(subfield_list)
qualified_name = '.'.join(subfield.name for subfield in subfield_list)
in_path = qualified_name in path_parameters
if descriptor.get('required', in_path):
descriptor['required'] = True
param_order.append(qualified_name)
params[qualified_name] = descriptor | [
"Adds all parameters in a field to a method parameters descriptor.\n\n Simple fields will only have one parameter, but a message field 'x' that\n corresponds to a message class with fields 'y' and 'z' will result in\n parameters 'x.y' and 'x.z', for example. The mapping from field to\n parameters is mostly handled by __field_to_subfields.\n\n Args:\n field: Field from which parameters will be added to the method descriptor.\n path_parameters: A list of parameters matched from a path for this field.\n For example for the hypothetical 'x' from above if the path was\n '/a/{x.z}/b/{other}' then this list would contain only the element\n 'x.z' since 'other' does not match to this field.\n params: Dictionary with parameter names as keys and parameter descriptors\n as values. This will be updated for each parameter in the field.\n param_order: List of required parameter names to give them an order in the\n descriptor. All required parameters in the field will be added to this\n list.\n "
] |
Please provide a description of the function:def __params_descriptor(self, message_type, request_kind, path, method_id):
path_parameter_dict = self.__get_path_parameters(path)
if not isinstance(message_type, resource_container.ResourceContainer):
if path_parameter_dict:
_logger.warning('Method %s specifies path parameters but you are not '
'using a ResourceContainer; instead, you are using %r. '
'This will fail in future releases; please switch to '
'using ResourceContainer as soon as possible.',
method_id, type(message_type))
return self.__params_descriptor_without_container(
message_type, request_kind, path)
# From here, we can assume message_type is a ResourceContainer
message_type = message_type.parameters_message_class()
params = {}
param_order = []
# Make sure all path parameters are covered.
for field_name, matched_path_parameters in path_parameter_dict.iteritems():
field = message_type.field_by_name(field_name)
self.__validate_path_parameters(field, matched_path_parameters)
# Add all fields, sort by field.number since we have parameterOrder.
for field in sorted(message_type.all_fields(), key=lambda f: f.number):
matched_path_parameters = path_parameter_dict.get(field.name, [])
self.__add_parameters_from_field(field, matched_path_parameters,
params, param_order)
return params, param_order | [
"Describe the parameters of a method.\n\n If the message_type is not a ResourceContainer, will fall back to\n __params_descriptor_without_container (which will eventually be deprecated).\n\n If the message type is a ResourceContainer, then all path/query parameters\n will come from the ResourceContainer This method will also make sure all\n path parameters are covered by the message fields.\n\n Args:\n message_type: messages.Message or ResourceContainer class, Message with\n parameters to describe.\n request_kind: The type of request being made.\n path: string, HTTP path to method.\n method_id: string, Unique method identifier (e.g. 'myapi.items.method')\n\n Returns:\n A tuple (dict, list of string): Descriptor of the parameters, Order of the\n parameters.\n "
] |
Please provide a description of the function:def __request_message_descriptor(self, request_kind, message_type, method_id,
path):
descriptor = {}
params, param_order = self.__params_descriptor(message_type, request_kind,
path, method_id)
if isinstance(message_type, resource_container.ResourceContainer):
message_type = message_type.body_message_class()
if (request_kind == self.__NO_BODY or
message_type == message_types.VoidMessage()):
descriptor['body'] = 'empty'
else:
descriptor['body'] = 'autoTemplate(backendRequest)'
descriptor['bodyName'] = 'resource'
self.__request_schema[method_id] = self.__parser.add_message(
message_type.__class__)
if params:
descriptor['parameters'] = params
if param_order:
descriptor['parameterOrder'] = param_order
return descriptor | [
"Describes the parameters and body of the request.\n\n Args:\n request_kind: The type of request being made.\n message_type: messages.Message or ResourceContainer class. The message to\n describe.\n method_id: string, Unique method identifier (e.g. 'myapi.items.method')\n path: string, HTTP path to method.\n\n Returns:\n Dictionary describing the request.\n\n Raises:\n ValueError: if the method path and request required fields do not match\n "
] |
Please provide a description of the function:def __method_descriptor(self, service, method_info,
rosy_method, protorpc_method_info):
descriptor = {}
request_message_type = (resource_container.ResourceContainer.
get_request_message(protorpc_method_info.remote))
request_kind = self.__get_request_kind(method_info)
remote_method = protorpc_method_info.remote
descriptor['path'] = method_info.get_path(service.api_info)
descriptor['httpMethod'] = method_info.http_method
descriptor['rosyMethod'] = rosy_method
descriptor['request'] = self.__request_message_descriptor(
request_kind, request_message_type,
method_info.method_id(service.api_info),
descriptor['path'])
descriptor['response'] = self.__response_message_descriptor(
remote_method.response_type(), method_info.method_id(service.api_info))
# Audiences, scopes, allowed_client_ids and auth_level could be set at
# either the method level or the API level. Allow an empty list at the
# method level to override the setting at the API level.
scopes = (method_info.scopes
if method_info.scopes is not None
else service.api_info.scopes)
if scopes:
descriptor['scopes'] = scopes
audiences = (method_info.audiences
if method_info.audiences is not None
else service.api_info.audiences)
if audiences:
descriptor['audiences'] = audiences
allowed_client_ids = (method_info.allowed_client_ids
if method_info.allowed_client_ids is not None
else service.api_info.allowed_client_ids)
if allowed_client_ids:
descriptor['clientIds'] = allowed_client_ids
if remote_method.method.__doc__:
descriptor['description'] = remote_method.method.__doc__
auth_level = (method_info.auth_level
if method_info.auth_level is not None
else service.api_info.auth_level)
if auth_level is not None:
descriptor['authLevel'] = AUTH_LEVEL.reverse_mapping[auth_level]
descriptor['useRequestUri'] = method_info.use_request_uri(service.api_info)
return descriptor | [
"Describes a method.\n\n Args:\n service: endpoints.Service, Implementation of the API as a service.\n method_info: _MethodInfo, Configuration for the method.\n rosy_method: string, ProtoRPC method name prefixed with the\n name of the service.\n protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC\n description of the method.\n\n Returns:\n Dictionary describing the method.\n "
] |
Please provide a description of the function:def __schema_descriptor(self, services):
methods_desc = {}
for service in services:
protorpc_methods = service.all_remote_methods()
for protorpc_method_name in protorpc_methods.iterkeys():
rosy_method = '%s.%s' % (service.__name__, protorpc_method_name)
method_id = self.__id_from_name[rosy_method]
request_response = {}
request_schema_id = self.__request_schema.get(method_id)
if request_schema_id:
request_response['request'] = {
'$ref': request_schema_id
}
response_schema_id = self.__response_schema.get(method_id)
if response_schema_id:
request_response['response'] = {
'$ref': response_schema_id
}
methods_desc[rosy_method] = request_response
descriptor = {
'methods': methods_desc,
'schemas': self.__parser.schemas(),
}
return descriptor | [
"Descriptor for the all the JSON Schema used.\n\n Args:\n services: List of protorpc.remote.Service instances implementing an\n api/version.\n\n Returns:\n Dictionary containing all the JSON Schema used in the service.\n "
] |
Please provide a description of the function:def __get_merged_api_info(self, services):
merged_api_info = services[0].api_info
# Verify that, if there are multiple classes here, they're allowed to
# implement the same API.
for service in services[1:]:
if not merged_api_info.is_same_api(service.api_info):
raise api_exceptions.ApiConfigurationError(
_MULTICLASS_MISMATCH_ERROR_TEMPLATE % (service.api_info.name,
service.api_info.api_version))
return merged_api_info | [
"Builds a description of an API.\n\n Args:\n services: List of protorpc.remote.Service instances implementing an\n api/version.\n\n Returns:\n The _ApiInfo object to use for the API that the given services implement.\n\n Raises:\n ApiConfigurationError: If there's something wrong with the API\n configuration, such as a multiclass API decorated with different API\n descriptors (see the docstring for api()).\n "
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.