<SYSTEM_TASK:> Also updates the state of the containing oneof in the parent message. <END_TASK> <USER_TASK:> Description: def Modified(self): """Also updates the state of the containing oneof in the parent message."""
try:
    self._parent_message_weakref._UpdateOneofState(self._field)
    super(_OneofListener, self).Modified()
except ReferenceError:
    pass
<SYSTEM_TASK:> Returns a string containing the name of an enum value. <END_TASK> <USER_TASK:> Description: def Name(self, number): """Returns a string containing the name of an enum value."""
if number in self._enum_type.values_by_number:
    return self._enum_type.values_by_number[number].name
raise ValueError('Enum %s has no name defined for value %d' % (
    self._enum_type.name, number))
<SYSTEM_TASK:> Returns the value corresponding to the given enum name. <END_TASK> <USER_TASK:> Description: def Value(self, name): """Returns the value corresponding to the given enum name."""
if name in self._enum_type.values_by_name:
    return self._enum_type.values_by_name[name].number
raise ValueError('Enum %s has no value defined for name %s' % (
    self._enum_type.name, name))
<SYSTEM_TASK:> Load global singleton of tcmps lib handler. <END_TASK> <USER_TASK:> Description: def _load_tcmps_lib(): """ Load global singleton of tcmps lib handler. This function is not called at the top level, so that the shared library is loaded lazily only when needed. """
global _g_TCMPS_LIB
if _g_TCMPS_LIB is None:
    # This library requires macOS 10.14 or above
    if _mac_ver() < (10, 14):
        return None

    # The symbols defined in libtcmps are now exposed directly by
    # libunity_shared. Eventually the object_detector and
    # activity_classifier toolkits will use the same Python/C++ bridge as
    # the other toolkits, and this usage of ctypes will go away.
    file_dir = _os.path.dirname(__file__)
    lib_path = _os.path.abspath(_os.path.join(file_dir, _os.pardir,
                                              'libunity_shared.dylib'))
    try:
        _g_TCMPS_LIB = _ctypes.CDLL(lib_path, _ctypes.RTLD_LOCAL)
    except OSError:
        pass
return _g_TCMPS_LIB
<SYSTEM_TASK:> Returns name of MPS device that will be used, else None. <END_TASK> <USER_TASK:> Description: def mps_device_name(): """ Returns name of MPS device that will be used, else None. """
lib = _load_tcmps_lib()
if lib is None:
    return None

n = 256
c_name = (_ctypes.c_char * n)()
ret = lib.TCMPSMetalDeviceName(_ctypes.byref(c_name), _ctypes.c_int32(n))
if ret == 0:
    return _decode_bytes_to_native_string(c_name.value)
else:
    return None
<SYSTEM_TASK:> Returns the memory size in bytes that can be effectively allocated on the <END_TASK> <USER_TASK:> Description: def mps_device_memory_limit(): """ Returns the memory size in bytes that can be effectively allocated on the MPS device that will be used, or None if no suitable device is available. """
lib = _load_tcmps_lib()
if lib is None:
    return None

c_size = _ctypes.c_uint64()
ret = lib.TCMPSMetalDeviceMemoryLimit(_ctypes.byref(c_size))
return c_size.value if ret == 0 else None
<SYSTEM_TASK:> Copy the shape from TCMPS as a new numpy ndarray. <END_TASK> <USER_TASK:> Description: def shape(self): """Copy the shape from TCMPS as a new numpy ndarray."""
# Create C variables that will serve as out parameters for TCMPS.
shape_ptr = _ctypes.POINTER(_ctypes.c_size_t)()  # size_t* shape_ptr
dim = _ctypes.c_size_t()                         # size_t dim

# Obtain pointer into memory owned by the C++ object self.handle.
status_code = self._LIB.TCMPSGetFloatArrayShape(
    self.handle, _ctypes.byref(shape_ptr), _ctypes.byref(dim))
assert status_code == 0, "Error calling TCMPSGetFloatArrayShape"

return _shape_tuple_from_ctypes(shape_ptr, dim)
<SYSTEM_TASK:> Copy the data from TCMPS into a new numpy ndarray <END_TASK> <USER_TASK:> Description: def asnumpy(self): """Copy the data from TCMPS into a new numpy ndarray"""
# Create C variables that will serve as out parameters for TCMPS.
data_ptr = _ctypes.POINTER(_ctypes.c_float)()    # float* data_ptr
shape_ptr = _ctypes.POINTER(_ctypes.c_size_t)()  # size_t* shape_ptr
dim = _ctypes.c_size_t()                         # size_t dim

# Obtain pointers into memory owned by the C++ object self.handle.
# Note that this may trigger synchronization with another thread
# producing the data.
status_code = self._LIB.TCMPSReadFloatArray(
    self.handle, _ctypes.byref(data_ptr), _ctypes.byref(shape_ptr),
    _ctypes.byref(dim))
assert status_code == 0, "Error calling TCMPSReadFloatArray"

return _numpy_array_from_ctypes(data_ptr, shape_ptr, dim)
<SYSTEM_TASK:> Gets SNS Message, deserialises the message, <END_TASK> <USER_TASK:> Description: def route_sns_task(event, context): """ Gets SNS Message, deserialises the message, imports the function, calls the function with args """
record = event['Records'][0]
message = json.loads(
    record['Sns']['Message']
)
return run_message(message)
<SYSTEM_TASK:> Async task decorator so that running <END_TASK> <USER_TASK:> Description: def task(*args, **kwargs): """Async task decorator so that running Args: func (function): the function to be wrapped Further requirements: func must be an independent top-level function. i.e. not a class method or an anonymous function service (str): either 'lambda' or 'sns' remote_aws_lambda_function_name (str): the name of a remote lambda function to call with this task remote_aws_region (str): the name of a remote region to make lambda/sns calls against Returns: A replacement function that dispatches func() to run asynchronously through the service in question """
func = None
if len(args) == 1 and callable(args[0]):
    func = args[0]

if not kwargs:  # Default Values
    service = 'lambda'
    lambda_function_name_arg = None
    aws_region_arg = None

else:  # Arguments were passed
    service = kwargs.get('service', 'lambda')
    lambda_function_name_arg = kwargs.get('remote_aws_lambda_function_name')
    aws_region_arg = kwargs.get('remote_aws_region')

capture_response = kwargs.get('capture_response', False)

def func_wrapper(func):

    task_path = get_func_task_path(func)

    @wraps(func)
    def _run_async(*args, **kwargs):
        """
        This is the wrapping async function that replaces the function
        that is decorated with @task.
        Args:
            These are just passed through to @task's func

        Assuming a valid service is passed to task() and it is run
        inside a Lambda process (i.e. AWS_LAMBDA_FUNCTION_NAME exists),
        it dispatches the function to be run through the service variable.
        Otherwise, it runs the task synchronously.

        Returns:
            In async mode, the object returned includes state of the dispatch.
            When outside of Lambda, the func passed to @task is run and we
            return the actual value.
        """
        lambda_function_name = lambda_function_name_arg or os.environ.get('AWS_LAMBDA_FUNCTION_NAME')
        aws_region = aws_region_arg or os.environ.get('AWS_REGION')

        if (service in ASYNC_CLASSES) and (lambda_function_name):
            send_result = ASYNC_CLASSES[service](lambda_function_name=lambda_function_name,
                                                 aws_region=aws_region,
                                                 capture_response=capture_response).send(task_path, args, kwargs)
            return send_result
        else:
            return func(*args, **kwargs)

    update_wrapper(_run_async, func)

    _run_async.service = service
    _run_async.sync = func

    return _run_async

return func_wrapper(func) if func else func_wrapper
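A usage sketch for the decorator above, assuming Zappa is installed and the decorated code is deployed as (or alongside) a Lambda function; the function names, arguments, and topic choice are hypothetical illustrations, not part of the original source.

from zappa.asynchronous import task

@task
def send_email(recipient, subject):
    # Dispatched through Lambda when AWS_LAMBDA_FUNCTION_NAME is set,
    # run synchronously otherwise.
    print("Emailing {}: {}".format(recipient, subject))

@task(service='sns')
def audit_log(message):
    # Dispatched through an SNS topic instead of a direct Lambda invocation.
    print(message)

send_email("you@example.com", "Hello")  # returns dispatch state when async, else the return value
audit_log.sync("direct call")           # .sync is the original, undecorated function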
<SYSTEM_TASK:> Format the modular task path for a function via inspection. <END_TASK> <USER_TASK:> Description: def get_func_task_path(func): """ Format the modular task path for a function via inspection. """
module_path = inspect.getmodule(func).__name__
task_path = '{module_path}.{func_name}'.format(
    module_path=module_path,
    func_name=func.__name__
)
return task_path
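A small standalone check of the path format produced above; the helper is re-stated here so the snippet runs on its own, and `hello` is just a placeholder function.

import inspect

def get_func_task_path(func):
    module_path = inspect.getmodule(func).__name__
    return '{}.{}'.format(module_path, func.__name__)

def hello():
    pass

print(get_func_task_path(hello))  # e.g. '__main__.hello' when run as a script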
<SYSTEM_TASK:> Get the response from the async table <END_TASK> <USER_TASK:> Description: def get_async_response(response_id): """ Get the response from the async table """
response = DYNAMODB_CLIENT.get_item(
    TableName=ASYNC_RESPONSE_TABLE,
    Key={'id': {'S': str(response_id)}}
)
if 'Item' not in response:
    return None

return {
    'status': response['Item']['async_status']['S'],
    'response': json.loads(response['Item']['async_response']['S']),
}
<SYSTEM_TASK:> Create the message object and pass it to the actual sender. <END_TASK> <USER_TASK:> Description: def send(self, task_path, args, kwargs): """ Create the message object and pass it to the actual sender. """
message = {
    'task_path': task_path,
    'capture_response': self.capture_response,
    'response_id': self.response_id,
    'args': args,
    'kwargs': kwargs
}
self._send(message)
return self
<SYSTEM_TASK:> Given a message, directly invoke the lambda function for this task. <END_TASK> <USER_TASK:> Description: def _send(self, message): """ Given a message, directly invoke the lambda function for this task. """
message['command'] = 'zappa.asynchronous.route_lambda_task'
payload = json.dumps(message).encode('utf-8')
if len(payload) > LAMBDA_ASYNC_PAYLOAD_LIMIT:  # pragma: no cover
    raise AsyncException("Payload too large for async Lambda call")
self.response = self.client.invoke(
    FunctionName=self.lambda_function_name,
    InvocationType='Event',  # makes the call async
    Payload=payload
)
self.sent = (self.response.get('StatusCode', 0) == 202)
<SYSTEM_TASK:> Given a message, publish to this topic. <END_TASK> <USER_TASK:> Description: def _send(self, message): """ Given a message, publish to this topic. """
message['command'] = 'zappa.asynchronous.route_sns_task'
payload = json.dumps(message).encode('utf-8')
if len(payload) > LAMBDA_ASYNC_PAYLOAD_LIMIT:  # pragma: no cover
    raise AsyncException("Payload too large for SNS")
self.response = self.client.publish(
    TargetArn=self.arn,
    Message=payload
)
self.sent = self.response.get('MessageId')
<SYSTEM_TASK:> Accepts a str, returns an int timestamp. <END_TASK> <USER_TASK:> Description: def string_to_timestamp(timestring): """ Accepts a str, returns an int timestamp. """
ts = None

# Uses an extended version of Go's duration string.
try:
    delta = durationpy.from_str(timestring)
    past = datetime.datetime.utcnow() - delta
    ts = calendar.timegm(past.timetuple())
    return ts
except Exception as e:
    pass

if ts:
    return ts
# else:
#     print("Unable to parse timestring.")
return 0
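A minimal standalone sketch of the same conversion, assuming the durationpy package (which the code above relies on for Go-style duration strings) is installed; the helper name is mine, not the original's.

import calendar
import datetime

import durationpy  # parses Go-style duration strings such as "3h30m"

def duration_to_timestamp(timestring):
    # Epoch seconds for `timestring` ago, or 0 if the string cannot be parsed.
    try:
        delta = durationpy.from_str(timestring)
        past = datetime.datetime.utcnow() - delta
        return calendar.timegm(past.timetuple())
    except Exception:
        return 0

print(duration_to_timestamp("3h"))     # timestamp for three hours ago
print(duration_to_timestamp("bogus"))  # 0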
<SYSTEM_TASK:> Automatically try to discover Django settings files, <END_TASK> <USER_TASK:> Description: def detect_django_settings(): """ Automatically try to discover Django settings files, return them as relative module paths. """
matches = []
for root, dirnames, filenames in os.walk(os.getcwd()):
    for filename in fnmatch.filter(filenames, '*settings.py'):
        full = os.path.join(root, filename)
        if 'site-packages' in full:
            continue
        package_path = full.replace(os.getcwd(), '')
        package_module = package_path.replace(os.sep, '.').split('.', 1)[1].replace('.py', '')
        matches.append(package_module)
return matches
<SYSTEM_TASK:> Automatically try to discover Flask apps files, <END_TASK> <USER_TASK:> Description: def detect_flask_apps(): """ Automatically try to discover Flask apps files, return them as relative module paths. """
matches = []
for root, dirnames, filenames in os.walk(os.getcwd()):
    for filename in fnmatch.filter(filenames, '*.py'):
        full = os.path.join(root, filename)
        if 'site-packages' in full:
            continue

        with io.open(full, 'r', encoding='utf-8') as f:
            lines = f.readlines()
            for line in lines:
                app = None

                # Kind of janky..
                if '= Flask(' in line:
                    app = line.split('= Flask(')[0].strip()
                if '=Flask(' in line:
                    app = line.split('=Flask(')[0].strip()

                if not app:
                    continue

                package_path = full.replace(os.getcwd(), '')
                package_module = package_path.replace(os.sep, '.').split('.', 1)[1].replace('.py', '')
                app_module = package_module + '.' + app

                matches.append(app_module)
return matches
<SYSTEM_TASK:> Checks if a newer version of Zappa is available. <END_TASK> <USER_TASK:> Description: def check_new_version_available(this_version): """ Checks if a newer version of Zappa is available. Returns True if an update is available, else False. """
import requests

pypi_url = 'https://pypi.python.org/pypi/Zappa/json'
resp = requests.get(pypi_url, timeout=1.5)
top_version = resp.json()['info']['version']

return this_version != top_version
<SYSTEM_TASK:> Checks if a directory lies in the same directory as a .py file with the same name. <END_TASK> <USER_TASK:> Description: def conflicts_with_a_neighbouring_module(directory_path): """ Checks if a directory lies in the same directory as a .py file with the same name. """
parent_dir_path, current_dir_name = os.path.split(os.path.normpath(directory_path))
neighbours = os.listdir(parent_dir_path)
conflicting_neighbour_filename = current_dir_name + '.py'
return conflicting_neighbour_filename in neighbours
<SYSTEM_TASK:> Given the WSGI environ and the response, <END_TASK> <USER_TASK:> Description: def common_log(environ, response, response_time=None): """ Given the WSGI environ and the response, log this event in Common Log Format. """
logger = logging.getLogger()

if response_time:
    formatter = ApacheFormatter(with_response_time=True)
    try:
        log_entry = formatter(response.status_code, environ,
                              len(response.content), rt_us=response_time)
    except TypeError:
        # Upstream introduced a very annoying breaking change on the rt_ms/rt_us kwarg.
        log_entry = formatter(response.status_code, environ,
                              len(response.content), rt_ms=response_time)
else:
    formatter = ApacheFormatter(with_response_time=False)
    log_entry = formatter(response.status_code, environ,
                          len(response.content))

logger.info(log_entry)

return log_entry
<SYSTEM_TASK:> Attempt to read a file from s3 containing a flat json object. Adds each <END_TASK> <USER_TASK:> Description: def load_remote_settings(self, remote_bucket, remote_file): """ Attempt to read a file from s3 containing a flat json object. Adds each key->value pair as environment variables. Helpful for keeping sensitive or stage-specific configuration variables in s3 instead of version control. """
if not self.session:
    boto_session = boto3.Session()
else:
    boto_session = self.session

s3 = boto_session.resource('s3')
try:
    remote_env_object = s3.Object(remote_bucket, remote_file).get()
except Exception as e:  # pragma: no cover
    # catch everything aws might decide to raise
    print('Could not load remote settings file.', e)
    return

try:
    content = remote_env_object['Body'].read()
except Exception as e:  # pragma: no cover
    # catch everything aws might decide to raise
    print('Exception while reading remote settings file.', e)
    return

try:
    settings_dict = json.loads(content)
except (ValueError, TypeError):  # pragma: no cover
    print('Failed to parse remote settings!')
    return

# add each key-value to environment - overwrites existing keys!
for key, value in settings_dict.items():
    if self.settings.LOG_LEVEL == "DEBUG":
        print('Adding {} -> {} to environment'.format(
            key,
            value
        ))
    # Environment variable keys can't be Unicode
    # https://github.com/Miserlou/Zappa/issues/604
    try:
        os.environ[str(key)] = value
    except Exception:
        if self.settings.LOG_LEVEL == "DEBUG":
            print("Environment variable keys must be non-unicode!")
<SYSTEM_TASK:> Given a function and event context, <END_TASK> <USER_TASK:> Description: def run_function(app_function, event, context): """ Given a function and event context, detect signature and execute, returning any result. """
# getargspec does not support python 3 method with type hints
# Related issue: https://github.com/Miserlou/Zappa/issues/1452
if hasattr(inspect, "getfullargspec"):  # Python 3
    args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec(app_function)
else:  # Python 2
    args, varargs, keywords, defaults = inspect.getargspec(app_function)
num_args = len(args)
if num_args == 0:
    result = app_function(event, context) if varargs else app_function()
elif num_args == 1:
    result = app_function(event, context) if varargs else app_function(event)
elif num_args == 2:
    result = app_function(event, context)
else:
    raise RuntimeError("Function signature is invalid. Expected a function that accepts at most "
                       "2 arguments or varargs.")
return result
<SYSTEM_TASK:> Get the associated function to execute for a triggered AWS event <END_TASK> <USER_TASK:> Description: def get_function_for_aws_event(self, record): """ Get the associated function to execute for a triggered AWS event Support S3, SNS, DynamoDB, kinesis and SQS events """
if 's3' in record:
    if ':' in record['s3']['configurationId']:
        return record['s3']['configurationId'].split(':')[-1]

arn = None
if 'Sns' in record:
    try:
        message = json.loads(record['Sns']['Message'])
        if message.get('command'):
            return message['command']
    except ValueError:
        pass
    arn = record['Sns'].get('TopicArn')
elif 'dynamodb' in record or 'kinesis' in record:
    arn = record.get('eventSourceARN')
elif 'eventSource' in record and record.get('eventSource') == 'aws:sqs':
    arn = record.get('eventSourceARN')
elif 's3' in record:
    arn = record['s3']['bucket']['arn']

if arn:
    return self.settings.AWS_EVENT_MAPPING.get(arn)

return None
<SYSTEM_TASK:> For the given event build ARN and return the configured function <END_TASK> <USER_TASK:> Description: def get_function_from_bot_intent_trigger(self, event): """ For the given event build ARN and return the configured function """
intent = event.get('currentIntent')
if intent:
    intent = intent.get('name')
    if intent:
        return self.settings.AWS_BOT_EVENT_MAPPING.get(
            "{}:{}".format(intent, event.get('invocationSource'))
        )
<SYSTEM_TASK:> Get the associated function to execute for a cognito trigger <END_TASK> <USER_TASK:> Description: def get_function_for_cognito_trigger(self, trigger): """ Get the associated function to execute for a cognito trigger """
print("get_function_for_cognito_trigger", self.settings.COGNITO_TRIGGER_MAPPING, trigger, self.settings.COGNITO_TRIGGER_MAPPING.get(trigger)) return self.settings.COGNITO_TRIGGER_MAPPING.get(trigger)
<SYSTEM_TASK:> Adds a method to the internal lists of allowed or denied methods. Each object in <END_TASK> <USER_TASK:> Description: def _addMethod(self, effect, verb, resource, conditions): """Adds a method to the internal lists of allowed or denied methods. Each object in the internal list contains a resource ARN and a condition statement. The condition statement can be null."""
if verb != "*" and not hasattr(HttpVerb, verb): raise NameError("Invalid HTTP verb " + verb + ". Allowed verbs in HttpVerb class") resourcePattern = re.compile(self.pathRegex) if not resourcePattern.match(resource): raise NameError("Invalid resource path: " + resource + ". Path should match " + self.pathRegex) if resource[:1] == "/": resource = resource[1:] resourceArn = ("arn:aws:execute-api:" + self.region + ":" + self.awsAccountId + ":" + self.restApiId + "/" + self.stage + "/" + verb + "/" + resource) if effect.lower() == "allow": self.allowMethods.append({ 'resourceArn' : resourceArn, 'conditions' : conditions }) elif effect.lower() == "deny": self.denyMethods.append({ 'resourceArn' : resourceArn, 'conditions' : conditions })
<SYSTEM_TASK:> A wrapper to apply configuration options to boto clients <END_TASK> <USER_TASK:> Description: def boto_client(self, service, *args, **kwargs): """A wrapper to apply configuration options to boto clients"""
return self.boto_session.client(service, *args, **self.configure_boto_session_method_kwargs(service, kwargs))
<SYSTEM_TASK:> A wrapper to apply configuration options to boto resources <END_TASK> <USER_TASK:> Description: def boto_resource(self, service, *args, **kwargs): """A wrapper to apply configuration options to boto resources"""
return self.boto_session.resource(service, *args, **self.configure_boto_session_method_kwargs(service, kwargs))
<SYSTEM_TASK:> For a given package, returns a list of required packages. Recursive. <END_TASK> <USER_TASK:> Description: def get_deps_list(self, pkg_name, installed_distros=None): """ For a given package, returns a list of required packages. Recursive. """
# https://github.com/Miserlou/Zappa/issues/1478. Using `pkg_resources`
# instead of `pip` is the recommended approach. The usage is nearly
# identical.
import pkg_resources
deps = []
if not installed_distros:
    installed_distros = pkg_resources.WorkingSet()
for package in installed_distros:
    if package.project_name.lower() == pkg_name.lower():
        deps = [(package.project_name, package.version)]
        for req in package.requires():
            deps += self.get_deps_list(pkg_name=req.project_name, installed_distros=installed_distros)
return list(set(deps))
<SYSTEM_TASK:> Takes the installed zappa and brings it into a fresh virtualenv-like folder. All dependencies are then downloaded. <END_TASK> <USER_TASK:> Description: def create_handler_venv(self): """ Takes the installed zappa and brings it into a fresh virtualenv-like folder. All dependencies are then downloaded. """
import subprocess

# We will need the current venv to pull Zappa from
current_venv = self.get_current_venv()

# Make a new folder for the handler packages
ve_path = os.path.join(os.getcwd(), 'handler_venv')

if os.sys.platform == 'win32':
    current_site_packages_dir = os.path.join(current_venv, 'Lib', 'site-packages')
    venv_site_packages_dir = os.path.join(ve_path, 'Lib', 'site-packages')
else:
    current_site_packages_dir = os.path.join(current_venv, 'lib', get_venv_from_python_version(), 'site-packages')
    venv_site_packages_dir = os.path.join(ve_path, 'lib', get_venv_from_python_version(), 'site-packages')

if not os.path.isdir(venv_site_packages_dir):
    os.makedirs(venv_site_packages_dir)

# Copy zappa* to the new virtualenv
zappa_things = [z for z in os.listdir(current_site_packages_dir) if z.lower()[:5] == 'zappa']
for z in zappa_things:
    copytree(os.path.join(current_site_packages_dir, z), os.path.join(venv_site_packages_dir, z))

# Use pip to download zappa's dependencies.
# Copying from the current venv causes issues with things like PyYAML that installs as yaml
zappa_deps = self.get_deps_list('zappa')
pkg_list = ['{0!s}=={1!s}'.format(dep, version) for dep, version in zappa_deps]

# Need to manually add setuptools
pkg_list.append('setuptools')
command = ["pip", "install", "--quiet", "--target", venv_site_packages_dir] + pkg_list

# This is the recommended method for installing packages if you don't
# want to depend on `setuptools`
# https://github.com/pypa/pip/issues/5240#issuecomment-381662679
pip_process = subprocess.Popen(command, stdout=subprocess.PIPE)

# Using communicate() to avoid deadlocks
pip_process.communicate()
pip_return_code = pip_process.returncode

if pip_return_code:
    raise EnvironmentError("Pypi lookup failed")

return ve_path
<SYSTEM_TASK:> Returns the path to the current virtualenv <END_TASK> <USER_TASK:> Description: def get_current_venv(): """ Returns the path to the current virtualenv """
if 'VIRTUAL_ENV' in os.environ:
    venv = os.environ['VIRTUAL_ENV']
elif os.path.exists('.python-version'):  # pragma: no cover
    try:
        subprocess.check_output(['pyenv', 'help'], stderr=subprocess.STDOUT)
    except OSError:
        print("This directory seems to have pyenv's local venv, "
              "but pyenv executable was not found.")
    with open('.python-version', 'r') as f:
        # minor fix in how .python-version is read
        # Related: https://github.com/Miserlou/Zappa/issues/921
        env_name = f.readline().strip()
    bin_path = subprocess.check_output(['pyenv', 'which', 'python']).decode('utf-8')
    venv = bin_path[:bin_path.rfind(env_name)] + env_name
else:  # pragma: no cover
    return None

return venv
<SYSTEM_TASK:> Extracts the lambda package into a given path. Assumes the package exists in lambda packages. <END_TASK> <USER_TASK:> Description: def extract_lambda_package(self, package_name, path): """ Extracts the lambda package into a given path. Assumes the package exists in lambda packages. """
lambda_package = lambda_packages[package_name][self.runtime]

# Trash the local version to help with package space saving
shutil.rmtree(os.path.join(path, package_name), ignore_errors=True)

tar = tarfile.open(lambda_package['path'], mode="r:gz")
for member in tar.getmembers():
    tar.extract(member, path)
<SYSTEM_TASK:> Returns a dict of installed packages that Zappa cares about. <END_TASK> <USER_TASK:> Description: def get_installed_packages(site_packages, site_packages_64): """ Returns a dict of installed packages that Zappa cares about. """
import pkg_resources

package_to_keep = []
if os.path.isdir(site_packages):
    package_to_keep += os.listdir(site_packages)
if os.path.isdir(site_packages_64):
    package_to_keep += os.listdir(site_packages_64)

package_to_keep = [x.lower() for x in package_to_keep]

installed_packages = {package.project_name.lower(): package.version
                      for package in pkg_resources.WorkingSet()
                      if package.project_name.lower() in package_to_keep
                      or package.location.lower() in [site_packages.lower(), site_packages_64.lower()]}

return installed_packages
<SYSTEM_TASK:> Checks if a given package version binary should be copied over from lambda packages. <END_TASK> <USER_TASK:> Description: def have_correct_lambda_package_version(self, package_name, package_version): """ Checks if a given package version binary should be copied over from lambda packages. package_name should be lower-cased version of package name. """
lambda_package_details = lambda_packages.get(package_name, {}).get(self.runtime)

if lambda_package_details is None:
    return False

# Binaries can be compiled for different package versions
# Related: https://github.com/Miserlou/Zappa/issues/800
if package_version != lambda_package_details['version']:
    return False

return True
<SYSTEM_TASK:> Gets the locally stored version of a manylinux wheel. If one does not exist, the function downloads it. <END_TASK> <USER_TASK:> Description: def get_cached_manylinux_wheel(self, package_name, package_version, disable_progress=False): """ Gets the locally stored version of a manylinux wheel. If one does not exist, the function downloads it. """
cached_wheels_dir = os.path.join(tempfile.gettempdir(), 'cached_wheels')
if not os.path.isdir(cached_wheels_dir):
    os.makedirs(cached_wheels_dir)

wheel_file = '{0!s}-{1!s}-{2!s}'.format(package_name, package_version, self.manylinux_wheel_file_suffix)
wheel_path = os.path.join(cached_wheels_dir, wheel_file)

if not os.path.exists(wheel_path) or not zipfile.is_zipfile(wheel_path):
    # The file is not cached, download it.
    wheel_url = self.get_manylinux_wheel_url(package_name, package_version)
    if not wheel_url:
        return None

    print(" - {}=={}: Downloading".format(package_name, package_version))
    with open(wheel_path, 'wb') as f:
        self.download_url_with_progress(wheel_url, f, disable_progress)

    if not zipfile.is_zipfile(wheel_path):
        return None
else:
    print(" - {}=={}: Using locally cached manylinux wheel".format(package_name, package_version))

return wheel_path
<SYSTEM_TASK:> For a given package name, returns a link to the download URL, <END_TASK> <USER_TASK:> Description: def get_manylinux_wheel_url(self, package_name, package_version): """ For a given package name, returns a link to the download URL, else returns None. Related: https://github.com/Miserlou/Zappa/issues/398 Examples here: https://gist.github.com/perrygeo/9545f94eaddec18a65fd7b56880adbae This function downloads metadata JSON of `package_name` from Pypi and examines if the package has a manylinux wheel. This function also caches the JSON file so that we don't have to poll Pypi every time. """
cached_pypi_info_dir = os.path.join(tempfile.gettempdir(), 'cached_pypi_info')
if not os.path.isdir(cached_pypi_info_dir):
    os.makedirs(cached_pypi_info_dir)

# Even though the metadata is for the package, we save it in a
# filename that includes the package's version. This helps in
# invalidating the cached file if the user moves to a different
# version of the package.
# Related: https://github.com/Miserlou/Zappa/issues/899
json_file = '{0!s}-{1!s}.json'.format(package_name, package_version)
json_file_path = os.path.join(cached_pypi_info_dir, json_file)

if os.path.exists(json_file_path):
    with open(json_file_path, 'rb') as metafile:
        data = json.load(metafile)
else:
    url = 'https://pypi.python.org/pypi/{}/json'.format(package_name)
    try:
        res = requests.get(url, timeout=float(os.environ.get('PIP_TIMEOUT', 1.5)))
        data = res.json()
    except Exception as e:  # pragma: no cover
        return None
    with open(json_file_path, 'wb') as metafile:
        jsondata = json.dumps(data)
        metafile.write(bytes(jsondata, "utf-8"))

if package_version not in data['releases']:
    return None

for f in data['releases'][package_version]:
    if f['filename'].endswith(self.manylinux_wheel_file_suffix):
        return f['url']

return None
<SYSTEM_TASK:> Copies src file to destination within a bucket. <END_TASK> <USER_TASK:> Description: def copy_on_s3(self, src_file_name, dst_file_name, bucket_name): """ Copies src file to destination within a bucket. """
try:
    self.s3_client.head_bucket(Bucket=bucket_name)
except botocore.exceptions.ClientError as e:  # pragma: no cover
    # If a client error is thrown, then check that it was a 404 error.
    # If it was a 404 error, then the bucket does not exist.
    error_code = int(e.response['Error']['Code'])
    if error_code == 404:
        return False

copy_src = {
    "Bucket": bucket_name,
    "Key": src_file_name
}
try:
    self.s3_client.copy(
        CopySource=copy_src,
        Bucket=bucket_name,
        Key=dst_file_name
    )
    return True
except botocore.exceptions.ClientError:  # pragma: no cover
    return False
<SYSTEM_TASK:> Given a file name and a bucket, remove it from S3. <END_TASK> <USER_TASK:> Description: def remove_from_s3(self, file_name, bucket_name): """ Given a file name and a bucket, remove it from S3. There's no reason to keep the file hosted on S3 once it's been made into a Lambda function, so we can delete it from S3. Returns True on success, False on failure. """
try:
    self.s3_client.head_bucket(Bucket=bucket_name)
except botocore.exceptions.ClientError as e:  # pragma: no cover
    # If a client error is thrown, then check that it was a 404 error.
    # If it was a 404 error, then the bucket does not exist.
    error_code = int(e.response['Error']['Code'])
    if error_code == 404:
        return False

try:
    self.s3_client.delete_object(Bucket=bucket_name, Key=file_name)
    return True
except (botocore.exceptions.ParamValidationError, botocore.exceptions.ClientError):  # pragma: no cover
    return False
<SYSTEM_TASK:> Given an existing function ARN, update the configuration variables. <END_TASK> <USER_TASK:> Description: def update_lambda_configuration( self, lambda_arn, function_name, handler, description='Zappa Deployment', timeout=30, memory_size=512, publish=True, vpc_config=None, runtime='python2.7', aws_environment_variables=None, aws_kms_key_arn=None ): """ Given an existing function ARN, update the configuration variables. """
print("Updating Lambda function configuration..") if not vpc_config: vpc_config = {} if not self.credentials_arn: self.get_credentials_arn() if not aws_kms_key_arn: aws_kms_key_arn = '' if not aws_environment_variables: aws_environment_variables = {} # Check if there are any remote aws lambda env vars so they don't get trashed. # https://github.com/Miserlou/Zappa/issues/987, Related: https://github.com/Miserlou/Zappa/issues/765 lambda_aws_config = self.lambda_client.get_function_configuration(FunctionName=function_name) if "Environment" in lambda_aws_config: lambda_aws_environment_variables = lambda_aws_config["Environment"].get("Variables", {}) # Append keys that are remote but not in settings file for key, value in lambda_aws_environment_variables.items(): if key not in aws_environment_variables: aws_environment_variables[key] = value response = self.lambda_client.update_function_configuration( FunctionName=function_name, Runtime=runtime, Role=self.credentials_arn, Handler=handler, Description=description, Timeout=timeout, MemorySize=memory_size, VpcConfig=vpc_config, Environment={'Variables': aws_environment_variables}, KMSKeyArn=aws_kms_key_arn, TracingConfig={ 'Mode': 'Active' if self.xray_tracing else 'PassThrough' } ) resource_arn = response['FunctionArn'] if self.tags: self.lambda_client.tag_resource(Resource=resource_arn, Tags=self.tags) return resource_arn
<SYSTEM_TASK:> Directly invoke a named Lambda function with a payload. <END_TASK> <USER_TASK:> Description: def invoke_lambda_function( self, function_name, payload, invocation_type='Event', log_type='Tail', client_context=None, qualifier=None ): """ Directly invoke a named Lambda function with a payload. Returns the response. """
return self.lambda_client.invoke( FunctionName=function_name, InvocationType=invocation_type, LogType=log_type, Payload=payload )
<SYSTEM_TASK:> Rollback the lambda function code 'versions_back' number of revisions. <END_TASK> <USER_TASK:> Description: def rollback_lambda_function_version(self, function_name, versions_back=1, publish=True): """ Rollback the lambda function code 'versions_back' number of revisions. Returns the Function ARN. """
response = self.lambda_client.list_versions_by_function(FunctionName=function_name)

# Take into account $LATEST
if len(response['Versions']) < versions_back + 1:
    print("We do not have {} revisions. Aborting".format(str(versions_back)))
    return False

revisions = [int(revision['Version']) for revision in response['Versions'] if revision['Version'] != '$LATEST']
revisions.sort(reverse=True)

response = self.lambda_client.get_function(FunctionName='function:{}:{}'.format(function_name, revisions[versions_back]))
response = requests.get(response['Code']['Location'])

if response.status_code != 200:
    print("Failed to get version {} of {} code".format(versions_back, function_name))
    return False

response = self.lambda_client.update_function_code(FunctionName=function_name, ZipFile=response.content, Publish=publish)  # pragma: no cover

return response['FunctionArn']
<SYSTEM_TASK:> Returns the lambda function ARN, given a name <END_TASK> <USER_TASK:> Description: def get_lambda_function(self, function_name): """ Returns the lambda function ARN, given a name This requires the "lambda:GetFunction" role. """
response = self.lambda_client.get_function(
    FunctionName=function_name)
return response['Configuration']['FunctionArn']
<SYSTEM_TASK:> Simply returns the versions available for a Lambda function, given a function name. <END_TASK> <USER_TASK:> Description: def get_lambda_function_versions(self, function_name): """ Simply returns the versions available for a Lambda function, given a function name. """
try:
    response = self.lambda_client.list_versions_by_function(
        FunctionName=function_name
    )
    return response.get('Versions', [])
except Exception:
    return []
<SYSTEM_TASK:> Create the API Gateway for this Zappa deployment. <END_TASK> <USER_TASK:> Description: def create_api_gateway_routes( self, lambda_arn, api_name=None, api_key_required=False, authorization_type='NONE', authorizer=None, cors_options=None, description=None, endpoint_configuration=None ): """ Create the API Gateway for this Zappa deployment. Returns the new RestAPI CF resource. """
restapi = troposphere.apigateway.RestApi('Api')
restapi.Name = api_name or lambda_arn.split(':')[-1]
if not description:
    description = 'Created automatically by Zappa.'
restapi.Description = description
endpoint_configuration = [] if endpoint_configuration is None else endpoint_configuration
if self.boto_session.region_name == "us-gov-west-1":
    endpoint_configuration.append("REGIONAL")
if endpoint_configuration:
    endpoint = troposphere.apigateway.EndpointConfiguration()
    endpoint.Types = list(set(endpoint_configuration))
    restapi.EndpointConfiguration = endpoint
if self.apigateway_policy:
    restapi.Policy = json.loads(self.apigateway_policy)
self.cf_template.add_resource(restapi)

root_id = troposphere.GetAtt(restapi, 'RootResourceId')
invocation_prefix = "aws" if self.boto_session.region_name != "us-gov-west-1" else "aws-us-gov"
invocations_uri = 'arn:' + invocation_prefix + ':apigateway:' + self.boto_session.region_name + ':lambda:path/2015-03-31/functions/' + lambda_arn + '/invocations'

##
# The Resources
##
authorizer_resource = None
if authorizer:
    authorizer_lambda_arn = authorizer.get('arn', lambda_arn)
    lambda_uri = 'arn:{invocation_prefix}:apigateway:{region_name}:lambda:path/2015-03-31/functions/{lambda_arn}/invocations'.format(
        invocation_prefix=invocation_prefix,
        region_name=self.boto_session.region_name,
        lambda_arn=authorizer_lambda_arn
    )
    authorizer_resource = self.create_authorizer(
        restapi, lambda_uri, authorizer
    )

self.create_and_setup_methods(restapi, root_id, api_key_required, invocations_uri,
                              authorization_type, authorizer_resource, 0)

if cors_options:
    self.create_and_setup_cors(restapi, root_id, invocations_uri, 0, cors_options)

resource = troposphere.apigateway.Resource('ResourceAnyPathSlashed')
self.cf_api_resources.append(resource.title)
resource.RestApiId = troposphere.Ref(restapi)
resource.ParentId = root_id
resource.PathPart = "{proxy+}"
self.cf_template.add_resource(resource)

self.create_and_setup_methods(restapi, resource, api_key_required, invocations_uri,
                              authorization_type, authorizer_resource, 1)  # pragma: no cover

if cors_options:
    self.create_and_setup_cors(restapi, resource, invocations_uri, 1, cors_options)  # pragma: no cover

return restapi
<SYSTEM_TASK:> Create Authorizer for API gateway <END_TASK> <USER_TASK:> Description: def create_authorizer(self, restapi, uri, authorizer): """ Create Authorizer for API gateway """
authorizer_type = authorizer.get("type", "TOKEN").upper()
identity_validation_expression = authorizer.get('validation_expression', None)

authorizer_resource = troposphere.apigateway.Authorizer("Authorizer")
authorizer_resource.RestApiId = troposphere.Ref(restapi)
authorizer_resource.Name = authorizer.get("name", "ZappaAuthorizer")
authorizer_resource.Type = authorizer_type
authorizer_resource.AuthorizerUri = uri
authorizer_resource.IdentitySource = "method.request.header.%s" % authorizer.get('token_header', 'Authorization')
if identity_validation_expression:
    authorizer_resource.IdentityValidationExpression = identity_validation_expression

if authorizer_type == 'TOKEN':
    if not self.credentials_arn:
        self.get_credentials_arn()
    authorizer_resource.AuthorizerResultTtlInSeconds = authorizer.get('result_ttl', 300)
    authorizer_resource.AuthorizerCredentials = self.credentials_arn
if authorizer_type == 'COGNITO_USER_POOLS':
    authorizer_resource.ProviderARNs = authorizer.get('provider_arns')

self.cf_api_resources.append(authorizer_resource.title)
self.cf_template.add_resource(authorizer_resource)

return authorizer_resource
<SYSTEM_TASK:> Generator that iterates over the API keys associated with an api_id and a stage_name. <END_TASK> <USER_TASK:> Description: def get_api_keys(self, api_id, stage_name): """ Generator that iterates over the API keys associated with an api_id and a stage_name. """
response = self.apigateway_client.get_api_keys(limit=500)
stage_key = '{}/{}'.format(api_id, stage_name)
for api_key in response.get('items'):
    if stage_key in api_key.get('stageKeys'):
        yield api_key.get('id')
<SYSTEM_TASK:> Create new API key and link it with an api_id and a stage_name <END_TASK> <USER_TASK:> Description: def create_api_key(self, api_id, stage_name): """ Create new API key and link it with an api_id and a stage_name """
response = self.apigateway_client.create_api_key(
    name='{}_{}'.format(stage_name, api_id),
    description='Api Key for {}'.format(api_id),
    enabled=True,
    stageKeys=[
        {
            'restApiId': '{}'.format(api_id),
            'stageName': '{}'.format(stage_name)
        },
    ]
)
print('Created a new x-api-key: {}'.format(response['id']))
<SYSTEM_TASK:> Remove a generated API key for api_id and stage_name <END_TASK> <USER_TASK:> Description: def remove_api_key(self, api_id, stage_name): """ Remove a generated API key for api_id and stage_name """
response = self.apigateway_client.get_api_keys(
    limit=1,
    nameQuery='{}_{}'.format(stage_name, api_id)
)
for api_key in response.get('items'):
    self.apigateway_client.delete_api_key(
        apiKey="{}".format(api_key['id'])
    )
<SYSTEM_TASK:> Return an object that describes a change of configuration on the given staging. <END_TASK> <USER_TASK:> Description: def get_patch_op(self, keypath, value, op='replace'): """ Return an object that describes a change of configuration on the given staging. Setting will be applied on all available HTTP methods. """
if isinstance(value, bool):
    value = str(value).lower()
return {'op': op, 'path': '/*/*/{}'.format(keypath), 'value': value}
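A standalone sketch of the same patch-op shape (no class needed), showing how booleans are lowered to the strings API Gateway expects; the keypaths below are examples only.

def get_patch_op(keypath, value, op='replace'):
    if isinstance(value, bool):
        value = str(value).lower()
    return {'op': op, 'path': '/*/*/{}'.format(keypath), 'value': value}

print(get_patch_op('logging/loglevel', 'INFO'))
# {'op': 'replace', 'path': '/*/*/logging/loglevel', 'value': 'INFO'}
print(get_patch_op('metrics/enabled', True))
# {'op': 'replace', 'path': '/*/*/metrics/enabled', 'value': 'true'}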
<SYSTEM_TASK:> Generator that iterates over every available API. <END_TASK> <USER_TASK:> Description: def get_rest_apis(self, project_name): """ Generator that iterates over every available API matching the given project name. """
all_apis = self.apigateway_client.get_rest_apis(
    limit=500
)

for api in all_apis['items']:
    if api['name'] != project_name:
        continue
    yield api
<SYSTEM_TASK:> Delete the CF stack managed by Zappa. <END_TASK> <USER_TASK:> Description: def delete_stack(self, name, wait=False): """ Delete the CF stack managed by Zappa. """
try:
    stack = self.cf_client.describe_stacks(StackName=name)['Stacks'][0]
except:  # pragma: no cover
    print('No Zappa stack named {0}'.format(name))
    return False

tags = {x['Key']: x['Value'] for x in stack['Tags']}
if tags.get('ZappaProject') == name:
    self.cf_client.delete_stack(StackName=name)
    if wait:
        waiter = self.cf_client.get_waiter('stack_delete_complete')
        print('Waiting for stack {0} to be deleted..'.format(name))
        waiter.wait(StackName=name)
    return True
else:
    print('ZappaProject tag not found on {0}, doing nothing'.format(name))
    return False
<SYSTEM_TASK:> Build the entire CF stack. <END_TASK> <USER_TASK:> Description: def create_stack_template( self, lambda_arn, lambda_name, api_key_required, iam_authorization, authorizer, cors_options=None, description=None, endpoint_configuration=None ): """ Build the entire CF stack. Just used for the API Gateway, but could be expanded in the future. """
auth_type = "NONE" if iam_authorization and authorizer: logger.warn("Both IAM Authorization and Authorizer are specified, this is not possible. " "Setting Auth method to IAM Authorization") authorizer = None auth_type = "AWS_IAM" elif iam_authorization: auth_type = "AWS_IAM" elif authorizer: auth_type = authorizer.get("type", "CUSTOM") # build a fresh template self.cf_template = troposphere.Template() self.cf_template.add_description('Automatically generated with Zappa') self.cf_api_resources = [] self.cf_parameters = {} restapi = self.create_api_gateway_routes( lambda_arn, api_name=lambda_name, api_key_required=api_key_required, authorization_type=auth_type, authorizer=authorizer, cors_options=cors_options, description=description, endpoint_configuration=endpoint_configuration ) return self.cf_template
<SYSTEM_TASK:> Given a name, describes CloudFormation stacks and returns dict of the stack Outputs <END_TASK> <USER_TASK:> Description: def stack_outputs(self, name): """ Given a name, describes CloudFormation stacks and returns a dict of the stack Outputs, else returns an empty dict. """
try:
    stack = self.cf_client.describe_stacks(StackName=name)['Stacks'][0]
    return {x['OutputKey']: x['OutputValue'] for x in stack['Outputs']}
except botocore.client.ClientError:
    return {}
<SYSTEM_TASK:> Creates the API GW domain and returns the resulting DNS name. <END_TASK> <USER_TASK:> Description: def create_domain_name(self, domain_name, certificate_name, certificate_body=None, certificate_private_key=None, certificate_chain=None, certificate_arn=None, lambda_name=None, stage=None, base_path=None): """ Creates the API GW domain and returns the resulting DNS name. """
# This is a Let's Encrypt or custom certificate
if not certificate_arn:
    agw_response = self.apigateway_client.create_domain_name(
        domainName=domain_name,
        certificateName=certificate_name,
        certificateBody=certificate_body,
        certificatePrivateKey=certificate_private_key,
        certificateChain=certificate_chain
    )
# This is an AWS ACM-hosted Certificate
else:
    agw_response = self.apigateway_client.create_domain_name(
        domainName=domain_name,
        certificateName=certificate_name,
        certificateArn=certificate_arn
    )

api_id = self.get_api_id(lambda_name)
if not api_id:
    raise LookupError("No API URL to certify found - did you deploy?")

self.apigateway_client.create_base_path_mapping(
    domainName=domain_name,
    basePath='' if base_path is None else base_path,
    restApiId=api_id,
    stage=stage
)

return agw_response['distributionDomainName']
<SYSTEM_TASK:> Updates Route53 Records following GW domain creation <END_TASK> <USER_TASK:> Description: def update_route53_records(self, domain_name, dns_name): """ Updates Route53 Records following GW domain creation """
zone_id = self.get_hosted_zone_id_for_domain(domain_name)

is_apex = self.route53.get_hosted_zone(Id=zone_id)['HostedZone']['Name'][:-1] == domain_name
if is_apex:
    record_set = {
        'Name': domain_name,
        'Type': 'A',
        'AliasTarget': {
            'HostedZoneId': 'Z2FDTNDATAQYW2',  # This is a magic value that means "CloudFront"
            'DNSName': dns_name,
            'EvaluateTargetHealth': False
        }
    }
else:
    record_set = {
        'Name': domain_name,
        'Type': 'CNAME',
        'ResourceRecords': [
            {
                'Value': dns_name
            }
        ],
        'TTL': 60
    }

# Related: https://github.com/boto/boto3/issues/157
# and: http://docs.aws.amazon.com/Route53/latest/APIReference/CreateAliasRRSAPI.html
# and policy: https://spin.atomicobject.com/2016/04/28/route-53-hosted-zone-managment/
# pure_zone_id = zone_id.split('/hostedzone/')[1]

# XXX: ClientError: An error occurred (InvalidChangeBatch) when calling the ChangeResourceRecordSets operation:
# Tried to create an alias that targets d1awfeji80d0k2.cloudfront.net., type A in zone Z1XWOQP59BYF6Z,
# but the alias target name does not lie within the target zone
response = self.route53.change_resource_record_sets(
    HostedZoneId=zone_id,
    ChangeBatch={
        'Changes': [
            {
                'Action': 'UPSERT',
                'ResourceRecordSet': record_set
            }
        ]
    }
)

return response
<SYSTEM_TASK:> This updates your certificate information for an existing domain, <END_TASK> <USER_TASK:> Description: def update_domain_name(self, domain_name, certificate_name=None, certificate_body=None, certificate_private_key=None, certificate_chain=None, certificate_arn=None, lambda_name=None, stage=None, route53=True, base_path=None): """ This updates your certificate information for an existing domain, with similar arguments to boto's update_domain_name API Gateway api. It returns the resulting new domain information including the new certificate's ARN if created during this process. Previously, this method involved downtime that could take up to 40 minutes because the API Gateway api only allowed this by deleting, and then creating it. Related issues: https://github.com/Miserlou/Zappa/issues/590 https://github.com/Miserlou/Zappa/issues/588 https://github.com/Miserlou/Zappa/pull/458 https://github.com/Miserlou/Zappa/issues/882 https://github.com/Miserlou/Zappa/pull/883 """
print("Updating domain name!") certificate_name = certificate_name + str(time.time()) api_gateway_domain = self.apigateway_client.get_domain_name(domainName=domain_name) if not certificate_arn\ and certificate_body and certificate_private_key and certificate_chain: acm_certificate = self.acm_client.import_certificate(Certificate=certificate_body, PrivateKey=certificate_private_key, CertificateChain=certificate_chain) certificate_arn = acm_certificate['CertificateArn'] self.update_domain_base_path_mapping(domain_name, lambda_name, stage, base_path) return self.apigateway_client.update_domain_name(domainName=domain_name, patchOperations=[ {"op" : "replace", "path" : "/certificateName", "value" : certificate_name}, {"op" : "replace", "path" : "/certificateArn", "value" : certificate_arn} ])
<SYSTEM_TASK:> Update domain base path mapping on API Gateway if it was changed <END_TASK> <USER_TASK:> Description: def update_domain_base_path_mapping(self, domain_name, lambda_name, stage, base_path): """ Update domain base path mapping on API Gateway if it was changed """
api_id = self.get_api_id(lambda_name)
if not api_id:
    print("Warning! Can't update base path mapping!")
    return
base_path_mappings = self.apigateway_client.get_base_path_mappings(domainName=domain_name)
found = False
for base_path_mapping in base_path_mappings.get('items', []):
    if base_path_mapping['restApiId'] == api_id and base_path_mapping['stage'] == stage:
        found = True
        if base_path_mapping['basePath'] != base_path:
            self.apigateway_client.update_base_path_mapping(domainName=domain_name,
                                                            basePath=base_path_mapping['basePath'],
                                                            patchOperations=[
                                                                {"op": "replace",
                                                                 "path": "/basePath",
                                                                 "value": '' if base_path is None else base_path}
                                                            ])
if not found:
    self.apigateway_client.create_base_path_mapping(
        domainName=domain_name,
        basePath='' if base_path is None else base_path,
        restApiId=api_id,
        stage=stage
    )
<SYSTEM_TASK:> Same behaviour as list_hosted_zones, but transparently handles pagination. <END_TASK> <USER_TASK:> Description: def get_all_zones(self): """Same behaviour as list_hosted_zones, but transparently handles pagination."""
zones = {'HostedZones': []}

new_zones = self.route53.list_hosted_zones(MaxItems='100')
while new_zones['IsTruncated']:
    zones['HostedZones'] += new_zones['HostedZones']
    new_zones = self.route53.list_hosted_zones(Marker=new_zones['NextMarker'], MaxItems='100')

zones['HostedZones'] += new_zones['HostedZones']
return zones
<SYSTEM_TASK:> Scan our hosted zones for the record of a given name. <END_TASK> <USER_TASK:> Description: def get_domain_name(self, domain_name, route53=True): """ Scan our hosted zones for the record of a given name. Returns the record entry, else None. """
# Make sure api gateway domain is present
try:
    self.apigateway_client.get_domain_name(domainName=domain_name)
except Exception:
    return None

if not route53:
    return True

try:
    zones = self.get_all_zones()
    for zone in zones['HostedZones']:
        records = self.route53.list_resource_record_sets(HostedZoneId=zone['Id'])
        for record in records['ResourceRecordSets']:
            if record['Type'] in ('CNAME', 'A') and record['Name'][:-1] == domain_name:
                return record
except Exception as e:
    return None

##
# Old, automatic logic.
# If re-introduced, should be moved to a new function.
# Related ticket: https://github.com/Miserlou/Zappa/pull/458
##

# We may be in a position where Route53 doesn't have a domain, but the API Gateway does.
# We need to delete this before we can create the new Route53.
# try:
#     api_gateway_domain = self.apigateway_client.get_domain_name(domainName=domain_name)
#     self.apigateway_client.delete_domain_name(domainName=domain_name)
# except Exception:
#     pass

return None
<SYSTEM_TASK:> Given our role name, get and set the credentials_arn. <END_TASK> <USER_TASK:> Description: def get_credentials_arn(self): """ Given our role name, get and set the credentials_arn. """
role = self.iam.Role(self.role_name)
self.credentials_arn = role.arn
return role, self.credentials_arn
<SYSTEM_TASK:> Create and defines the IAM roles and policies necessary for Zappa. <END_TASK> <USER_TASK:> Description: def create_iam_roles(self): """ Create and defines the IAM roles and policies necessary for Zappa. If the IAM role already exists, it will be updated if necessary. """
attach_policy_obj = json.loads(self.attach_policy)
assume_policy_obj = json.loads(self.assume_policy)

if self.extra_permissions:
    for permission in self.extra_permissions:
        attach_policy_obj['Statement'].append(dict(permission))
    self.attach_policy = json.dumps(attach_policy_obj)

updated = False

# Create the role if needed
try:
    role, credentials_arn = self.get_credentials_arn()
except botocore.client.ClientError:
    print("Creating " + self.role_name + " IAM Role..")

    role = self.iam.create_role(
        RoleName=self.role_name,
        AssumeRolePolicyDocument=self.assume_policy
    )
    self.credentials_arn = role.arn
    updated = True

# create or update the role's policies if needed
policy = self.iam.RolePolicy(self.role_name, 'zappa-permissions')
try:
    if policy.policy_document != attach_policy_obj:
        print("Updating zappa-permissions policy on " + self.role_name + " IAM Role.")
        policy.put(PolicyDocument=self.attach_policy)
        updated = True
except botocore.client.ClientError:
    print("Creating zappa-permissions policy on " + self.role_name + " IAM Role.")
    policy.put(PolicyDocument=self.attach_policy)
    updated = True

if role.assume_role_policy_document != assume_policy_obj and \
        set(role.assume_role_policy_document['Statement'][0]['Principal']['Service']) != set(assume_policy_obj['Statement'][0]['Principal']['Service']):
    print("Updating assume role policy on " + self.role_name + " IAM Role.")
    self.iam_client.update_assume_role_policy(
        RoleName=self.role_name,
        PolicyDocument=self.assume_policy
    )
    updated = True

return self.credentials_arn, updated
<SYSTEM_TASK:> Remove obsolete policy statements to prevent policy from bloating over the limit after repeated updates. <END_TASK> <USER_TASK:> Description: def _clear_policy(self, lambda_name): """ Remove obsolete policy statements to prevent policy from bloating over the limit after repeated updates. """
try:
    policy_response = self.lambda_client.get_policy(
        FunctionName=lambda_name
    )
    if policy_response['ResponseMetadata']['HTTPStatusCode'] == 200:
        statement = json.loads(policy_response['Policy'])['Statement']
        for s in statement:
            delete_response = self.lambda_client.remove_permission(
                FunctionName=lambda_name,
                StatementId=s['Sid']
            )
            if delete_response['ResponseMetadata']['HTTPStatusCode'] != 204:
                logger.error('Failed to delete an obsolete policy statement: {}'.format(policy_response))
    else:
        logger.debug('Failed to load Lambda function policy: {}'.format(policy_response))
except ClientError as e:
    if e.args[0].find('ResourceNotFoundException') > -1:
        logger.debug('No policy found, must be first run.')
    else:
        logger.error('Unexpected client error {}'.format(e.args[0]))
<SYSTEM_TASK:> Returns an AWS-valid Lambda event name. <END_TASK> <USER_TASK:> Description: def get_event_name(lambda_name, name): """ Returns an AWS-valid Lambda event name. """
return '{prefix:.{width}}-{postfix}'.format(prefix=lambda_name, width=max(0, 63 - len(name)), postfix=name)[:64]
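A standalone sketch of the same truncation rule: the lambda-name prefix is clipped so that prefix, hyphen, and event name never exceed AWS's 64-character limit; the names used below are placeholders.

def get_event_name(lambda_name, name):
    return '{prefix:.{width}}-{postfix}'.format(
        prefix=lambda_name, width=max(0, 63 - len(name)), postfix=name)[:64]

short = get_event_name('myapp-dev', 'zappa-keep-warm-handler.keep_warm_callback')
long_ = get_event_name('a' * 80, 'cleanup')
print(short)        # full prefix kept, total well under 64 characters
print(len(long_))   # 64 -- the prefix is truncated to fit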
<SYSTEM_TASK:> Returns an AWS-valid CloudWatch rule name using a digest of the event name, lambda name, and function. <END_TASK> <USER_TASK:> Description: def get_hashed_rule_name(event, function, lambda_name): """ Returns an AWS-valid CloudWatch rule name using a digest of the event name, lambda name, and function. This allows support for rule names that may be longer than the 64 char limit. """
event_name = event.get('name', function)
name_hash = hashlib.sha1('{}-{}'.format(lambda_name, event_name).encode('UTF-8')).hexdigest()

return Zappa.get_event_name(name_hash, function)
<SYSTEM_TASK:> Delete a CWE rule. <END_TASK> <USER_TASK:> Description: def delete_rule(self, rule_name): """ Delete a CWE rule. This deletes them, but they will still show up in the AWS console. Annoying. """
logger.debug('Deleting existing rule {}'.format(rule_name))

# All targets must be removed before
# we can actually delete the rule.
try:
    targets = self.events_client.list_targets_by_rule(Rule=rule_name)
except botocore.exceptions.ClientError as e:
    # This avoids misbehavior if low permissions, related: https://github.com/Miserlou/Zappa/issues/286
    error_code = e.response['Error']['Code']
    if error_code == 'AccessDeniedException':
        raise
    else:
        logger.debug('No target found for this rule: {} {}'.format(rule_name, e.args[0]))
        return

if 'Targets' in targets and targets['Targets']:
    self.events_client.remove_targets(Rule=rule_name, Ids=[x['Id'] for x in targets['Targets']])
else:  # pragma: no cover
    logger.debug('No target to delete')

# Delete our rule.
self.events_client.delete_rule(Name=rule_name)
<SYSTEM_TASK:> Get all of the rule names associated with a lambda function. <END_TASK> <USER_TASK:> Description: def get_event_rule_names_for_lambda(self, lambda_arn): """ Get all of the rule names associated with a lambda function. """
response = self.events_client.list_rule_names_by_target(TargetArn=lambda_arn)
rule_names = response['RuleNames']
# Iterate when the results are paginated
while 'NextToken' in response:
    response = self.events_client.list_rule_names_by_target(TargetArn=lambda_arn,
                                                            NextToken=response['NextToken'])
    rule_names.extend(response['RuleNames'])
return rule_names
<SYSTEM_TASK:> Get all of the rule details associated with this function. <END_TASK> <USER_TASK:> Description: def get_event_rules_for_lambda(self, lambda_arn): """ Get all of the rule details associated with this function. """
rule_names = self.get_event_rule_names_for_lambda(lambda_arn=lambda_arn)
return [self.events_client.describe_rule(Name=r) for r in rule_names]
<SYSTEM_TASK:> Given a list of events, unschedule these CloudWatch Events. <END_TASK> <USER_TASK:> Description: def unschedule_events(self, events, lambda_arn=None, lambda_name=None, excluded_source_services=None): excluded_source_services = excluded_source_services or [] """ Given a list of events, unschedule these CloudWatch Events. 'events' is a list of dictionaries, where the dict must contains the string of a 'function' and the string of the event 'expression', and an optional 'name' and 'description'. """
self._clear_policy(lambda_name)

rule_names = self.get_event_rule_names_for_lambda(lambda_arn=lambda_arn)
for rule_name in rule_names:
    self.delete_rule(rule_name)
    print('Unscheduled ' + rule_name + '.')

non_cwe = [e for e in events if 'event_source' in e]
for event in non_cwe:
    # TODO: This WILL miss non CW events that have been deployed but changed names. Figure out a way to remove
    # them no matter what.
    # These are non CWE event sources.
    function = event['function']
    name = event.get('name', function)
    event_source = event.get('event_source', function)
    service = self.service_from_arn(event_source['arn'])
    # DynamoDB and Kinesis streams take quite a while to setup after they are created and do not need to be
    # re-scheduled when a new Lambda function is deployed. Therefore, they should not be removed during zappa
    # update or zappa schedule.
    if service not in excluded_source_services:
        remove_event_source(
            event_source,
            lambda_arn,
            function,
            self.boto_session
        )
        print("Removed event {}{}.".format(
            name,
            " ({})".format(str(event_source['events'])) if 'events' in event_source else '')
        )
<SYSTEM_TASK:> Fetch the CloudWatch logs for a given Lambda name. <END_TASK> <USER_TASK:> Description: def fetch_logs(self, lambda_name, filter_pattern='', limit=10000, start_time=0): """ Fetch the CloudWatch logs for a given Lambda name. """
log_name = '/aws/lambda/' + lambda_name streams = self.logs_client.describe_log_streams( logGroupName=log_name, descending=True, orderBy='LastEventTime' ) all_streams = streams['logStreams'] all_names = [stream['logStreamName'] for stream in all_streams] # Amazon uses millisecond epoch for some reason. # Thanks, Jeff. Convert once, before the loop, so pagination doesn't multiply start_time again. start_time = start_time * 1000 end_time = int(time.time()) * 1000 events = [] response = {} while not response or 'nextToken' in response: extra_args = {} if 'nextToken' in response: extra_args['nextToken'] = response['nextToken'] response = self.logs_client.filter_log_events( logGroupName=log_name, logStreamNames=all_names, startTime=start_time, endTime=end_time, filterPattern=filter_pattern, limit=limit, interleaved=True, # Does this actually improve performance? **extra_args ) if response and 'events' in response: events += response['events'] return sorted(events, key=lambda k: k['timestamp'])
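CloudWatch Logs timestamps are millisecond epochs, which is why both bounds are multiplied by 1000 above. A minimal standalone sketch of a single filter_log_events call (the helper name is hypothetical; Zappa's version adds stream discovery and nextToken pagination):

import time
import boto3

def fetch_recent_events(log_group, minutes=10, pattern=""):
    # Hypothetical helper: grab the last N minutes of events from one log group.
    logs = boto3.client("logs")
    now_ms = int(time.time()) * 1000
    start_ms = now_ms - minutes * 60 * 1000  # CloudWatch Logs wants millisecond epochs
    resp = logs.filter_log_events(
        logGroupName=log_group,
        startTime=start_ms,
        endTime=now_ms,
        filterPattern=pattern,
        interleaved=True,
    )
    return sorted(resp.get("events", []), key=lambda e: e["timestamp"])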
<SYSTEM_TASK:> Remove the specified CloudWatch Logs log group. <END_TASK> <USER_TASK:> Description: def remove_log_group(self, group_name): """ Remove the specified CloudWatch Logs log group. """
print("Removing log group: {}".format(group_name)) try: self.logs_client.delete_log_group(logGroupName=group_name) except botocore.exceptions.ClientError as e: print("Couldn't remove '{}' because of: {}".format(group_name, e))
<SYSTEM_TASK:> Remove all logs that are assigned to a given REST API id. <END_TASK> <USER_TASK:> Description: def remove_api_gateway_logs(self, project_name): """ Remove all logs that are assigned to a given REST API id. """
for rest_api in self.get_rest_apis(project_name): for stage in self.apigateway_client.get_stages(restApiId=rest_api['id'])['item']: self.remove_log_group('API-Gateway-Execution-Logs_{}/{}'.format(rest_api['id'], stage['stageName']))
<SYSTEM_TASK:> Get the Hosted Zone ID for a given domain. <END_TASK> <USER_TASK:> Description: def get_hosted_zone_id_for_domain(self, domain): """ Get the Hosted Zone ID for a given domain. """
all_zones = self.get_all_zones() return self.get_best_match_zone(all_zones, domain)
<SYSTEM_TASK:> Return the id of the zone whose name most closely matches the domain name. <END_TASK> <USER_TASK:> Description: def get_best_match_zone(all_zones, domain): """Return the id of the hosted zone whose name most closely matches the domain name."""
# Related: https://github.com/Miserlou/Zappa/issues/459 public_zones = [zone for zone in all_zones['HostedZones'] if not zone['Config']['PrivateZone']] zones = {zone['Name'][:-1]: zone['Id'] for zone in public_zones if zone['Name'][:-1] in domain} if zones: keys = max(zones.keys(), key=lambda a: len(a)) # get longest key -- best match. return zones[keys] else: return None
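The longest-name match matters when a parent zone and a more specific child zone both exist; a small self-contained illustration with made-up zone data:

def best_match_zone(all_zones, domain):
    # Same longest-match idea as get_best_match_zone above, on a plain dict fixture.
    public = [z for z in all_zones["HostedZones"] if not z["Config"]["PrivateZone"]]
    candidates = {z["Name"][:-1]: z["Id"] for z in public if z["Name"][:-1] in domain}
    return candidates[max(candidates, key=len)] if candidates else None

zones = {"HostedZones": [
    {"Name": "example.com.", "Id": "Z1", "Config": {"PrivateZone": False}},
    {"Name": "api.example.com.", "Id": "Z2", "Config": {"PrivateZone": False}},
]}
assert best_match_zone(zones, "dev.api.example.com") == "Z2"  # prefers the deeper zone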
<SYSTEM_TASK:> Load AWS credentials. <END_TASK> <USER_TASK:> Description: def load_credentials(self, boto_session=None, profile_name=None): """ Load AWS credentials. An optional boto_session can be provided, but that's usually for testing. An optional profile_name can be provided for config files that have multiple sets of credentials. """
# Automatically load credentials from config or environment if not boto_session: # If provided, use the supplied profile name. if profile_name: self.boto_session = boto3.Session(profile_name=profile_name, region_name=self.aws_region) elif os.environ.get('AWS_ACCESS_KEY_ID') and os.environ.get('AWS_SECRET_ACCESS_KEY'): region_name = os.environ.get('AWS_DEFAULT_REGION') or self.aws_region session_kw = { "aws_access_key_id": os.environ.get('AWS_ACCESS_KEY_ID'), "aws_secret_access_key": os.environ.get('AWS_SECRET_ACCESS_KEY'), "region_name": region_name, } # If we're executing in a role, AWS_SESSION_TOKEN will be present, too. if os.environ.get("AWS_SESSION_TOKEN"): session_kw["aws_session_token"] = os.environ.get("AWS_SESSION_TOKEN") self.boto_session = boto3.Session(**session_kw) else: self.boto_session = boto3.Session(region_name=self.aws_region) logger.debug("Loaded boto session from config: %s", boto_session) else: logger.debug("Using provided boto session: %s", boto_session) self.boto_session = boto_session # use provided session's region in case it differs self.aws_region = self.boto_session.region_name if self.boto_session.region_name not in LAMBDA_REGIONS: print("Warning! AWS Lambda may not be available in this AWS Region!") if self.boto_session.region_name not in API_GATEWAY_REGIONS: print("Warning! AWS API Gateway may not be available in this AWS Region!")
<SYSTEM_TASK:> Parse account key to get public key <END_TASK> <USER_TASK:> Description: def parse_account_key(): """Parse account key to get public key"""
LOGGER.info("Parsing account key...") cmd = [ 'openssl', 'rsa', '-in', os.path.join(gettempdir(), 'account.key'), '-noout', '-text' ] devnull = open(os.devnull, 'wb') return subprocess.check_output(cmd, stderr=devnull)
<SYSTEM_TASK:> Use regular expressions to find crypto values from parsed account key, <END_TASK> <USER_TASK:> Description: def get_boulder_header(key_bytes): """ Use regular expressions to find crypto values from parsed account key, and return a header we can send to our Boulder instance. """
pub_hex, pub_exp = re.search( r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)", key_bytes.decode('utf8'), re.MULTILINE | re.DOTALL).groups() pub_exp = "{0:x}".format(int(pub_exp)) pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp header = { "alg": "RS256", "jwk": { "e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))), "kty": "RSA", "n": _b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))), }, } return header
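_b64 is referenced here but not defined in this excerpt; ACME clients conventionally implement it as URL-safe base64 with the padding stripped, so the following is an assumed sketch rather than Zappa's verbatim helper:

import base64

def _b64(data):
    # Assumed implementation: ACME JWS fields use unpadded URL-safe base64.
    return base64.urlsafe_b64encode(data).decode("utf8").rstrip("=")

assert _b64(b"\x01\x00\x01") == "AQAB"  # the common RSA public exponent 65537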
<SYSTEM_TASK:> Loop until our challenge is verified, else fail. <END_TASK> <USER_TASK:> Description: def verify_challenge(uri): """ Loop until our challenge is verified, else fail. """
while True: try: resp = urlopen(uri) challenge_status = json.loads(resp.read().decode('utf8')) except IOError as e: raise ValueError("Error checking challenge: {0} {1}".format( e.code, json.loads(e.read().decode('utf8')))) if challenge_status['status'] == "pending": time.sleep(2) elif challenge_status['status'] == "valid": LOGGER.info("Domain verified!") break else: raise ValueError("Domain challenge did not pass: {0}".format( challenge_status))
<SYSTEM_TASK:> Helper function to make signed requests to Boulder <END_TASK> <USER_TASK:> Description: def _send_signed_request(url, payload): """ Helper function to make signed requests to Boulder """
payload64 = _b64(json.dumps(payload).encode('utf8')) out = parse_account_key() header = get_boulder_header(out) protected = copy.deepcopy(header) protected["nonce"] = urlopen(DEFAULT_CA + "/directory").headers['Replay-Nonce'] protected64 = _b64(json.dumps(protected).encode('utf8')) cmd = [ 'openssl', 'dgst', '-sha256', '-sign', os.path.join(gettempdir(), 'account.key') ] proc = subprocess.Popen( cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode('utf8')) if proc.returncode != 0: # pragma: no cover raise IOError("OpenSSL Error: {0}".format(err)) data = json.dumps({ "header": header, "protected": protected64, "payload": payload64, "signature": _b64(out), }) try: resp = urlopen(url, data.encode('utf8')) return resp.getcode(), resp.read() except IOError as e: return getattr(e, "code", None), getattr(e, "read", e.__str__)()
<SYSTEM_TASK:> Shamelessly promote our little community. <END_TASK> <USER_TASK:> Description: def shamelessly_promote(): """ Shamelessly promote our little community. """
click.echo("Need " + click.style("help", fg='green', bold=True) + "? Found a " + click.style("bug", fg='green', bold=True) + "? Let us " + click.style("know", fg='green', bold=True) + "! :D") click.echo("File bug reports on " + click.style("GitHub", bold=True) + " here: " + click.style("https://github.com/Miserlou/Zappa", fg='cyan', bold=True)) click.echo("And join our " + click.style("Slack", bold=True) + " channel here: " + click.style("https://slack.zappa.io", fg='cyan', bold=True)) click.echo("Love!,") click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
<SYSTEM_TASK:> A shortcut property for settings of a stage. <END_TASK> <USER_TASK:> Description: def stage_config(self): """ A shortcut property for settings of a stage. """
def get_stage_setting(stage, extended_stages=None): if extended_stages is None: extended_stages = [] if stage in extended_stages: raise RuntimeError(stage + " has already been extended to these settings. " "There is a circular extends within the settings file.") extended_stages.append(stage) try: stage_settings = dict(self.zappa_settings[stage].copy()) except KeyError: raise ClickException("Cannot extend settings for undefined stage '" + stage + "'.") extends_stage = self.zappa_settings[stage].get('extends', None) if not extends_stage: return stage_settings extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages) extended_settings.update(stage_settings) return extended_settings settings = get_stage_setting(stage=self.api_stage) # Backwards compatible for delete_zip setting that was more explicitly named delete_local_zip if u'delete_zip' in settings: settings[u'delete_local_zip'] = settings.get(u'delete_zip') settings.update(self.stage_config_overrides) return settings
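A compact illustration of how the 'extends' resolution above layers settings: the extending stage's keys win over the parent's. This simplified version also drops the 'extends' key from the result, which the original keeps:

def resolve_stage(settings, stage, seen=None):
    # Simplified re-implementation of the recursive lookup in stage_config above.
    seen = seen or []
    if stage in seen:
        raise RuntimeError("circular 'extends' involving " + stage)
    seen.append(stage)
    own = dict(settings[stage])
    parent = own.pop("extends", None)
    if not parent:
        return own
    merged = resolve_stage(settings, parent, seen)
    merged.update(own)  # child settings override the extended stage
    return merged

zappa_settings = {
    "base": {"aws_region": "us-east-1", "memory_size": 512},
    "prod": {"extends": "base", "memory_size": 1024},
}
assert resolve_stage(zappa_settings, "prod") == {"aws_region": "us-east-1", "memory_size": 1024}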
<SYSTEM_TASK:> Only build the package <END_TASK> <USER_TASK:> Description: def package(self, output=None): """ Only build the package """
# Make sure we're in a venv. self.check_venv() # force not to delete the local zip self.override_stage_config_setting('delete_local_zip', False) # Execute the prebuild script if self.prebuild_script: self.execute_prebuild_script() # Create the Lambda Zip self.create_package(output) self.callback('zip') size = human_size(os.path.getsize(self.zip_path)) click.echo(click.style("Package created", fg="green", bold=True) + ": " + click.style(self.zip_path, bold=True) + " (" + size + ")")
<SYSTEM_TASK:> Rolls back the currently deployed Lambda code to a previous revision. <END_TASK> <USER_TASK:> Description: def rollback(self, revision): """ Rolls back the currently deployed Lambda code to a previous revision. """
print("Rolling back..") self.zappa.rollback_lambda_function_version( self.lambda_name, versions_back=revision) print("Done!")
<SYSTEM_TASK:> Tail this function's logs. <END_TASK> <USER_TASK:> Description: def tail(self, since, filter_pattern, limit=10000, keep_open=True, colorize=True, http=False, non_http=False, force_colorize=False): """ Tail this function's logs. If keep_open, do so repeatedly, printing any new logs. """
try: since_stamp = string_to_timestamp(since) last_since = since_stamp while True: new_logs = self.zappa.fetch_logs( self.lambda_name, start_time=since_stamp, limit=limit, filter_pattern=filter_pattern, ) new_logs = [ e for e in new_logs if e['timestamp'] > last_since ] self.print_logs(new_logs, colorize, http, non_http, force_colorize) if not keep_open: break if new_logs: last_since = new_logs[-1]['timestamp'] time.sleep(1) except KeyboardInterrupt: # pragma: no cover # Die gracefully try: sys.exit(0) except SystemExit: os._exit(130)
<SYSTEM_TASK:> Tear down an existing deployment. <END_TASK> <USER_TASK:> Description: def undeploy(self, no_confirm=False, remove_logs=False): """ Tear down an existing deployment. """
if not no_confirm: # pragma: no cover confirm = input("Are you sure you want to undeploy? [y/n] ") if confirm != 'y': return if self.use_alb: self.zappa.undeploy_lambda_alb(self.lambda_name) if self.use_apigateway: if remove_logs: self.zappa.remove_api_gateway_logs(self.lambda_name) domain_name = self.stage_config.get('domain', None) base_path = self.stage_config.get('base_path', None) # Only remove the api key when not specified if self.api_key_required and self.api_key is None: api_id = self.zappa.get_api_id(self.lambda_name) self.zappa.remove_api_key(api_id, self.api_stage) gateway_id = self.zappa.undeploy_api_gateway( self.lambda_name, domain_name=domain_name, base_path=base_path ) self.unschedule() # removes event triggers, including warm up event. self.zappa.delete_lambda_function(self.lambda_name) if remove_logs: self.zappa.remove_lambda_function_logs(self.lambda_name) click.echo(click.style("Done", fg="green", bold=True) + "!")
<SYSTEM_TASK:> Given a list of functions and a schedule to execute them, <END_TASK> <USER_TASK:> Description: def schedule(self): """ Given a list of functions and a schedule to execute them, set up regular execution. """
events = self.stage_config.get('events', []) if events: if not isinstance(events, list): # pragma: no cover print("Events must be supplied as a list.") return for event in events: self.collision_warning(event.get('function')) if self.stage_config.get('keep_warm', True): if not events: events = [] keep_warm_rate = self.stage_config.get('keep_warm_expression', "rate(4 minutes)") events.append({'name': 'zappa-keep-warm', 'function': 'handler.keep_warm_callback', 'expression': keep_warm_rate, 'description': 'Zappa Keep Warm - {}'.format(self.lambda_name)}) if events: try: function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) except botocore.exceptions.ClientError as e: # pragma: no cover click.echo(click.style("Function does not exist", fg="yellow") + ", please " + click.style("deploy", bold=True) + "first. Ex:" + click.style("zappa deploy {}.".format(self.api_stage), bold=True)) sys.exit(-1) print("Scheduling..") self.zappa.schedule_events( lambda_arn=function_response['Configuration']['FunctionArn'], lambda_name=self.lambda_name, events=events ) # Add async tasks SNS if self.stage_config.get('async_source', None) == 'sns' \ and self.stage_config.get('async_resources', True): self.lambda_arn = self.zappa.get_lambda_function( function_name=self.lambda_name) topic_arn = self.zappa.create_async_sns_topic( lambda_name=self.lambda_name, lambda_arn=self.lambda_arn ) click.echo('SNS Topic created: %s' % topic_arn) # Add async tasks DynamoDB table_name = self.stage_config.get('async_response_table', False) read_capacity = self.stage_config.get('async_response_table_read_capacity', 1) write_capacity = self.stage_config.get('async_response_table_write_capacity', 1) if table_name and self.stage_config.get('async_resources', True): created, response_table = self.zappa.create_async_dynamodb_table( table_name, read_capacity, write_capacity) if created: click.echo('DynamoDB table created: %s' % table_name) else: click.echo('DynamoDB table exists: %s' % table_name) provisioned_throughput = response_table['Table']['ProvisionedThroughput'] if provisioned_throughput['ReadCapacityUnits'] != read_capacity or \ provisioned_throughput['WriteCapacityUnits'] != write_capacity: click.echo(click.style( "\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format(table_name), fg='red' ))
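For reference, the event dicts consumed above (and by unschedule_events earlier) are plain dictionaries; the names, dotted paths and ARN below are illustrative only, not values from this codebase:

events = [
    {
        # CloudWatch Events schedule entry
        "name": "nightly-report",                # optional; defaults to the function path
        "function": "tasks.generate_report",     # dotted path to the callable
        "expression": "rate(1 day)",             # CloudWatch Events rate/cron expression
        "description": "Build the nightly report",
    },
    {
        # Non-CWE event source, e.g. an S3 bucket notification
        "function": "tasks.process_upload",
        "event_source": {
            "arn": "arn:aws:s3:::my-upload-bucket",
            "events": ["s3:ObjectCreated:*"],
        },
    },
]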
<SYSTEM_TASK:> Given a list of scheduled functions, <END_TASK> <USER_TASK:> Description: def unschedule(self): """ Given a list of scheduled functions, tear down their regular execution. """
# Run even if events are not defined to remove previously existing ones (thus default to []). events = self.stage_config.get('events', []) if not isinstance(events, list): # pragma: no cover print("Events must be supplied as a list.") return function_arn = None try: function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) function_arn = function_response['Configuration']['FunctionArn'] except botocore.exceptions.ClientError as e: # pragma: no cover raise ClickException("Function does not exist, you should deploy first. Ex: zappa deploy {}. " "Proceeding to unschedule CloudWatch based events.".format(self.api_stage)) print("Unscheduling..") self.zappa.unschedule_events( lambda_name=self.lambda_name, lambda_arn=function_arn, events=events, ) # Remove async task SNS if self.stage_config.get('async_source', None) == 'sns' \ and self.stage_config.get('async_resources', True): removed_arns = self.zappa.remove_async_sns_topic(self.lambda_name) click.echo('SNS Topic removed: %s' % ', '.join(removed_arns))
<SYSTEM_TASK:> Apply various heuristics to return a colorized version of the invoke <END_TASK> <USER_TASK:> Description: def colorize_invoke_command(self, string): """ Apply various heuristics to return a colorized version of the invoke command string. If these fail, simply return the string in plaintext. Inspired by colorize_log_entry(). """
final_string = string try: # Line headers try: for token in ['START', 'END', 'REPORT', '[DEBUG]']: if token in final_string: format_string = '[{}]' # match whole words only pattern = r'\b{}\b' if token == '[DEBUG]': format_string = '{}' pattern = re.escape(token) repl = click.style( format_string.format(token), bold=True, fg='cyan' ) final_string = re.sub( pattern.format(token), repl, final_string ) except Exception: # pragma: no cover pass # Green bold Tokens try: for token in [ 'Zappa Event:', 'RequestId:', 'Version:', 'Duration:', 'Billed', 'Memory Size:', 'Max Memory Used:' ]: if token in final_string: final_string = final_string.replace(token, click.style( token, bold=True, fg='green' )) except Exception: # pragma: no cover pass # UUIDs for token in final_string.replace('\t', ' ').split(' '): try: if token.count('-') == 4 and token.replace('-', '').isalnum(): final_string = final_string.replace( token, click.style(token, fg='magenta') ) except Exception: # pragma: no cover pass return final_string except Exception: return string
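The dash-counting UUID heuristic above will also recolor tokens such as 'a-b-c-d-e'; a stricter regex check is one possible alternative, sketched here (not what Zappa ships):

import re

UUID_RE = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", re.IGNORECASE
)

def looks_like_uuid(token):
    # Matches the canonical 8-4-4-4-12 hex layout instead of counting dashes.
    return bool(UUID_RE.match(token))

assert looks_like_uuid("123e4567-e89b-12d3-a456-426614174000")
assert not looks_like_uuid("a-b-c-d-e")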
<SYSTEM_TASK:> Make sure the environment contains only strings <END_TASK> <USER_TASK:> Description: def check_environment(self, environment): """ Make sure the environment contains only strings (since putenv needs a string) """
non_strings = [] for (k,v) in environment.items(): if not isinstance(v, basestring): non_strings.append(k) if non_strings: raise ValueError("The following environment variables are not strings: {}".format(", ".join(non_strings))) else: return True
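A standalone usage sketch of the same validation; on Python 3 the basestring check above reduces to str:

def check_environment(environment):
    # Same check as above, outside the CLI class.
    non_strings = [k for k, v in environment.items() if not isinstance(v, str)]
    if non_strings:
        raise ValueError(
            "The following environment variables are not strings: {}".format(", ".join(non_strings))
        )
    return True

assert check_environment({"DEBUG": "false", "STAGE": "prod"})
try:
    check_environment({"TIMEOUT": 30})
except ValueError as e:
    print(e)  # TIMEOUT is rejected because putenv() needs string values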
<SYSTEM_TASK:> Allows the execution of custom code between creation of the zip file and deployment to AWS. <END_TASK> <USER_TASK:> Description: def callback(self, position): """ Allows the execution of custom code between creation of the zip file and deployment to AWS. :return: None """
callbacks = self.stage_config.get('callbacks', {}) callback = callbacks.get(position) if callback: (mod_path, cb_func_name) = callback.rsplit('.', 1) try: # Prefer callback in working directory if mod_path.count('.') >= 1: # Callback function is nested in a folder (mod_folder_path, mod_name) = mod_path.rsplit('.', 1) mod_folder_path_fragments = mod_folder_path.split('.') working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments) else: mod_name = mod_path working_dir = os.getcwd() working_dir_importer = pkgutil.get_importer(working_dir) module_ = working_dir_importer.find_module(mod_name).load_module(mod_name) except (ImportError, AttributeError): try: # Callback func might be in virtualenv module_ = importlib.import_module(mod_path) except ImportError: # pragma: no cover raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style( "import {position} callback ".format(position=position), bold=True) + 'module: "{mod_path}"'.format(mod_path=click.style(mod_path, bold=True))) if not hasattr(module_, cb_func_name): # pragma: no cover raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style( "find {position} callback ".format(position=position), bold=True) + 'function: "{cb_func_name}" '.format( cb_func_name=click.style(cb_func_name, bold=True)) + 'in module "{mod_path}"'.format(mod_path=mod_path)) cb_func = getattr(module_, cb_func_name) cb_func(self)
<SYSTEM_TASK:> Print a warning if there's a new Zappa version available. <END_TASK> <USER_TASK:> Description: def check_for_update(self): """ Print a warning if there's a new Zappa version available. """
try: version = pkg_resources.require("zappa")[0].version updateable = check_new_version_available(version) if updateable: click.echo(click.style("Important!", fg="yellow", bold=True) + " A new version of " + click.style("Zappa", bold=True) + " is available!") click.echo("Upgrade with: " + click.style("pip install zappa --upgrade", bold=True)) click.echo("Visit the project page on GitHub to see the latest changes: " + click.style("https://github.com/Miserlou/Zappa", bold=True)) except Exception as e: # pragma: no cover print(e) return
<SYSTEM_TASK:> Parse, filter and print logs to the console. <END_TASK> <USER_TASK:> Description: def print_logs(self, logs, colorize=True, http=False, non_http=False, force_colorize=None): """ Parse, filter and print logs to the console. """
for log in logs: timestamp = log['timestamp'] message = log['message'] if "START RequestId" in message: continue if "REPORT RequestId" in message: continue if "END RequestId" in message: continue if not colorize and not force_colorize: if http: if self.is_http_log_entry(message.strip()): print("[" + str(timestamp) + "] " + message.strip()) elif non_http: if not self.is_http_log_entry(message.strip()): print("[" + str(timestamp) + "] " + message.strip()) else: print("[" + str(timestamp) + "] " + message.strip()) else: if http: if self.is_http_log_entry(message.strip()): click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()), color=force_colorize) elif non_http: if not self.is_http_log_entry(message.strip()): click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()), color=force_colorize) else: click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()), color=force_colorize)
<SYSTEM_TASK:> Determines if a log entry is an HTTP-formatted log string or not. <END_TASK> <USER_TASK:> Description: def is_http_log_entry(self, string): """ Determines if a log entry is an HTTP-formatted log string or not. """
# Debug event filter if 'Zappa Event' in string: return False # IP address filter for token in string.replace('\t', ' ').split(' '): try: if (token.count('.') == 3 and token.replace('.', '').isnumeric()): return True except Exception: # pragma: no cover pass return False
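A standalone usage sketch of the IPv4 heuristic above (plain function rather than the CLI method):

def is_http_log_entry(line):
    # An entry counts as "HTTP" if any whitespace-separated token is all digits and dots.
    if "Zappa Event" in line:
        return False
    for token in line.replace("\t", " ").split(" "):
        if token.count(".") == 3 and token.replace(".", "").isnumeric():
            return True
    return False

assert is_http_log_entry("203.0.113.7 - - [01/Jan/2019] GET / 200")
assert not is_http_log_entry("[DEBUG] Zappa Event: {}")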
<SYSTEM_TASK:> Apply various heuristics to return a colorized version of a string. <END_TASK> <USER_TASK:> Description: def colorize_log_entry(self, string): """ Apply various heuristics to return a colorized version of a string. If these fail, simply return the string in plaintext. """
final_string = string try: # First, do stuff in square brackets inside_squares = re.findall(r'\[([^]]*)\]', string) for token in inside_squares: if token in ['CRITICAL', 'ERROR', 'WARNING', 'DEBUG', 'INFO', 'NOTSET']: final_string = final_string.replace('[' + token + ']', click.style("[", fg='cyan') + click.style(token, fg='cyan', bold=True) + click.style("]", fg='cyan')) else: final_string = final_string.replace('[' + token + ']', click.style("[", fg='cyan') + click.style(token, bold=True) + click.style("]", fg='cyan')) # Then do quoted strings quotes = re.findall(r'"[^"]*"', string) for token in quotes: final_string = final_string.replace(token, click.style(token, fg="yellow")) # And UUIDs for token in final_string.replace('\t', ' ').split(' '): try: if token.count('-') == 4 and token.replace('-', '').isalnum(): final_string = final_string.replace(token, click.style(token, fg="magenta")) except Exception: # pragma: no cover pass # And IP addresses try: if token.count('.') == 3 and token.replace('.', '').isnumeric(): final_string = final_string.replace(token, click.style(token, fg="red")) except Exception: # pragma: no cover pass # And status codes try: if token in ['200']: final_string = final_string.replace(token, click.style(token, fg="green")) if token in ['400', '401', '403', '404', '405', '500']: final_string = final_string.replace(token, click.style(token, fg="red")) except Exception: # pragma: no cover pass # And Zappa Events try: if "Zappa Event:" in final_string: final_string = final_string.replace("Zappa Event:", click.style("Zappa Event:", bold=True, fg="green")) except Exception: # pragma: no cover pass # And dates for token in final_string.split('\t'): try: is_date = parser.parse(token) final_string = final_string.replace(token, click.style(token, fg="green")) except Exception: # pragma: no cover pass final_string = final_string.replace('\t', ' ').replace(' ', ' ') if final_string[0] != ' ': final_string = ' ' + final_string return final_string except Exception as e: # pragma: no cover return string
<SYSTEM_TASK:> Parse and execute the prebuild_script from the zappa_settings. <END_TASK> <USER_TASK:> Description: def execute_prebuild_script(self): """ Parse and execute the prebuild_script from the zappa_settings. """
(pb_mod_path, pb_func) = self.prebuild_script.rsplit('.', 1) try: # Prefer prebuild script in working directory if pb_mod_path.count('.') >= 1: # Prebuild script func is nested in a folder (mod_folder_path, mod_name) = pb_mod_path.rsplit('.', 1) mod_folder_path_fragments = mod_folder_path.split('.') working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments) else: mod_name = pb_mod_path working_dir = os.getcwd() working_dir_importer = pkgutil.get_importer(working_dir) module_ = working_dir_importer.find_module(mod_name).load_module(mod_name) except (ImportError, AttributeError): try: # Prebuild func might be in virtualenv module_ = importlib.import_module(pb_mod_path) except ImportError: # pragma: no cover raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style( "import prebuild script ", bold=True) + 'module: "{pb_mod_path}"'.format( pb_mod_path=click.style(pb_mod_path, bold=True))) if not hasattr(module_, pb_func): # pragma: no cover raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style( "find prebuild script ", bold=True) + 'function: "{pb_func}" '.format( pb_func=click.style(pb_func, bold=True)) + 'in module "{pb_mod_path}"'.format( pb_mod_path=pb_mod_path)) prebuild_function = getattr(module_, pb_func) prebuild_function()
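The importer.find_module(...).load_module(...) pattern used above (and in callback earlier) is deprecated in recent Python releases; one modern equivalent, sketched here as an alternative rather than what Zappa ships, loads a module from an explicit directory with importlib.util:

import importlib.util
import os

def load_module_from_dir(mod_name, directory):
    # Locate <directory>/<mod_name>.py and import it under mod_name.
    path = os.path.join(directory, mod_name + ".py")
    spec = importlib.util.spec_from_file_location(mod_name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module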
<SYSTEM_TASK:> Given a string, print a warning if this could <END_TASK> <USER_TASK:> Description: def collision_warning(self, item): """ Given a string, print a warning if this could collide with a Zappa core package module. Use for app functions and events. """
namespace_collisions = [ "zappa.", "wsgi.", "middleware.", "handler.", "util.", "letsencrypt.", "cli." ] for namespace_collision in namespace_collisions: if item.startswith(namespace_collision): click.echo(click.style("Warning!", fg="red", bold=True) + " You may have a namespace collision between " + click.style(item, bold=True) + " and " + click.style(namespace_collision, bold=True) + "! You may want to rename that file.")
<SYSTEM_TASK:> Ensure we're inside a virtualenv. <END_TASK> <USER_TASK:> Description: def check_venv(self): """ Ensure we're inside a virtualenv. """
if self.zappa: venv = self.zappa.get_current_venv() else: # Just for `init`, when we don't have settings yet. venv = Zappa.get_current_venv() if not venv: raise ClickException( click.style("Zappa", bold=True) + " requires an " + click.style("active virtual environment", bold=True, fg="red") + "!\n" + "Learn more about virtual environments here: " + click.style("http://docs.python-guide.org/en/latest/dev/virtualenvs/", bold=False, fg="cyan"))
<SYSTEM_TASK:> Test the deployed endpoint with a GET request. <END_TASK> <USER_TASK:> Description: def touch_endpoint(self, endpoint_url): """ Test the deployed endpoint with a GET request. """
# Private APIGW endpoints most likely can't be reached by a deployer # unless they're connected to the VPC by VPN. Instead of trying # connect to the service, print a warning and let the user know # to check it manually. # See: https://github.com/Miserlou/Zappa/pull/1719#issuecomment-471341565 if 'PRIVATE' in self.stage_config.get('endpoint_configuration', []): print( click.style("Warning!", fg="yellow", bold=True) + " Since you're deploying a private API Gateway endpoint," " Zappa cannot determine if your function is returning " " a correct status code. You should check your API's response" " manually before considering this deployment complete." ) return touch_path = self.stage_config.get('touch_path', '/') req = requests.get(endpoint_url + touch_path) # Sometimes on really large packages, it can take 60-90 secs to be # ready and requests will return 504 status_code until ready. # So, if we get a 504 status code, rerun the request up to 4 times or # until we don't get a 504 error if req.status_code == 504: i = 0 status_code = 504 while status_code == 504 and i <= 4: req = requests.get(endpoint_url + touch_path) status_code = req.status_code i += 1 if req.status_code >= 500: raise ClickException(click.style("Warning!", fg="red", bold=True) + " Status check on the deployed lambda failed." + " A GET request to '" + touch_path + "' yielded a " + click.style(str(req.status_code), fg="red", bold=True) + " response code.")
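The 504 retry above exists because API Gateway returns 504 while a large cold package is still initializing. A generic standalone sketch of the same idea (the helper name is hypothetical, and the fixed delay between attempts is an addition that Zappa's version does not use):

import time
import requests

def get_with_504_retry(url, attempts=5, delay=2.0):
    # Retry while API Gateway reports 504 (the Lambda behind it is still warming up).
    resp = requests.get(url)
    tries = 1
    while resp.status_code == 504 and tries < attempts:
        time.sleep(delay)
        resp = requests.get(url)
        tries += 1
    return resp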
<SYSTEM_TASK:> Permute all casings of a given string. <END_TASK> <USER_TASK:> Description: def all_casings(input_string): """ Permute all casings of a given string. A pretty algorithm, via @Amber http://stackoverflow.com/questions/6792803/finding-all-possible-case-permutations-in-python """
if not input_string: yield "" else: first = input_string[:1] if first.lower() == first.upper(): for sub_casing in all_casings(input_string[1:]): yield first + sub_casing else: for sub_casing in all_casings(input_string[1:]): yield first.lower() + sub_casing yield first.upper() + sub_casing
<SYSTEM_TASK:> Get encoding from request headers or page head. <END_TASK> <USER_TASK:> Description: def get_encoding(headers, content): """Get encoding from request headers or page head."""
encoding = None content_type = headers.get('content-type') if content_type: _, params = cgi.parse_header(content_type) if 'charset' in params: encoding = params['charset'].strip("'\"") if not encoding: content = utils.pretty_unicode(content[:1000]) if six.PY3 else content charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') encoding = (charset_re.findall(content) + pragma_re.findall(content) + xml_re.findall(content)) encoding = encoding and encoding[0] or None return encoding
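The header branch above leans on cgi.parse_header to pull the charset parameter out of Content-Type; a quick illustration (note that the cgi module is deprecated since Python 3.11, so newer code may need a different parser):

import cgi

value, params = cgi.parse_header('text/html; charset="ISO-8859-1"')
assert value == "text/html"
assert params["charset"] == "ISO-8859-1"  # surrounding quotes are stripped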