Dataset columns:
identifier: string (length 1 to 155)
parameters: string (length 2 to 6.09k)
docstring: string (length 11 to 63.4k)
docstring_summary: string (length 0 to 63.4k)
function: string (length 29 to 99.8k)
function_tokens: sequence
start_point: sequence
end_point: sequence
language: string (1 distinct value)
docstring_language: string (length 2 to 7)
docstring_language_predictions: string (length 18 to 23)
is_langid_reliable: string (2 distinct values)
reject_location_related_install_options
( requirements: List[InstallRequirement], options: Optional[List[str]] )
If any location-changing --install-option arguments were passed for requirements or on the command-line, then show a deprecation warning.
If any location-changing --install-option arguments were passed for requirements or on the command-line, then show a deprecation warning.
def reject_location_related_install_options( requirements: List[InstallRequirement], options: Optional[List[str]] ) -> None: """If any location-changing --install-option arguments were passed for requirements or on the command-line, then show a deprecation warning. """ def format_options(option_names: Iterable[str]) -> List[str]: return ["--{}".format(name.replace("_", "-")) for name in option_names] offenders = [] for requirement in requirements: install_options = requirement.install_options location_options = parse_distutils_args(install_options) if location_options: offenders.append( "{!r} from {}".format( format_options(location_options.keys()), requirement ) ) if options: location_options = parse_distutils_args(options) if location_options: offenders.append( "{!r} from command line".format( format_options(location_options.keys()) ) ) if not offenders: return raise CommandError( "Location-changing options found in --install-option: {}." " This is unsupported, use pip-level options like --user," " --prefix, --root, and --target instead.".format( "; ".join(offenders) ) )
[ "def", "reject_location_related_install_options", "(", "requirements", ":", "List", "[", "InstallRequirement", "]", ",", "options", ":", "Optional", "[", "List", "[", "str", "]", "]", ")", "->", "None", ":", "def", "format_options", "(", "option_names", ":", "Iterable", "[", "str", "]", ")", "->", "List", "[", "str", "]", ":", "return", "[", "\"--{}\"", ".", "format", "(", "name", ".", "replace", "(", "\"_\"", ",", "\"-\"", ")", ")", "for", "name", "in", "option_names", "]", "offenders", "=", "[", "]", "for", "requirement", "in", "requirements", ":", "install_options", "=", "requirement", ".", "install_options", "location_options", "=", "parse_distutils_args", "(", "install_options", ")", "if", "location_options", ":", "offenders", ".", "append", "(", "\"{!r} from {}\"", ".", "format", "(", "format_options", "(", "location_options", ".", "keys", "(", ")", ")", ",", "requirement", ")", ")", "if", "options", ":", "location_options", "=", "parse_distutils_args", "(", "options", ")", "if", "location_options", ":", "offenders", ".", "append", "(", "\"{!r} from command line\"", ".", "format", "(", "format_options", "(", "location_options", ".", "keys", "(", ")", ")", ")", ")", "if", "not", "offenders", ":", "return", "raise", "CommandError", "(", "\"Location-changing options found in --install-option: {}.\"", "\" This is unsupported, use pip-level options like --user,\"", "\" --prefix, --root, and --target instead.\"", ".", "format", "(", "\"; \"", ".", "join", "(", "offenders", ")", ")", ")" ]
[ 660, 0 ]
[ 699, 5 ]
python
en
['en', 'en', 'en']
True
create_os_error_message
( error: OSError, show_traceback: bool, using_user_site: bool )
Format an error message for an OSError It may occur anytime during the execution of the install command.
Format an error message for an OSError
def create_os_error_message( error: OSError, show_traceback: bool, using_user_site: bool ) -> str: """Format an error message for an OSError It may occur anytime during the execution of the install command. """ parts = [] # Mention the error if we are not going to show a traceback parts.append("Could not install packages due to an OSError") if not show_traceback: parts.append(": ") parts.append(str(error)) else: parts.append(".") # Spilt the error indication from a helper message (if any) parts[-1] += "\n" # Suggest useful actions to the user: # (1) using user site-packages or (2) verifying the permissions if error.errno == errno.EACCES: user_option_part = "Consider using the `--user` option" permissions_part = "Check the permissions" if not running_under_virtualenv() and not using_user_site: parts.extend([ user_option_part, " or ", permissions_part.lower(), ]) else: parts.append(permissions_part) parts.append(".\n") # Suggest the user to enable Long Paths if path length is # more than 260 if (WINDOWS and error.errno == errno.ENOENT and error.filename and len(error.filename) > 260): parts.append( "HINT: This error might have occurred since " "this system does not have Windows Long Path " "support enabled. You can find information on " "how to enable this at " "https://pip.pypa.io/warnings/enable-long-paths\n" ) return "".join(parts).strip() + "\n"
[ "def", "create_os_error_message", "(", "error", ":", "OSError", ",", "show_traceback", ":", "bool", ",", "using_user_site", ":", "bool", ")", "->", "str", ":", "parts", "=", "[", "]", "# Mention the error if we are not going to show a traceback", "parts", ".", "append", "(", "\"Could not install packages due to an OSError\"", ")", "if", "not", "show_traceback", ":", "parts", ".", "append", "(", "\": \"", ")", "parts", ".", "append", "(", "str", "(", "error", ")", ")", "else", ":", "parts", ".", "append", "(", "\".\"", ")", "# Spilt the error indication from a helper message (if any)", "parts", "[", "-", "1", "]", "+=", "\"\\n\"", "# Suggest useful actions to the user:", "# (1) using user site-packages or (2) verifying the permissions", "if", "error", ".", "errno", "==", "errno", ".", "EACCES", ":", "user_option_part", "=", "\"Consider using the `--user` option\"", "permissions_part", "=", "\"Check the permissions\"", "if", "not", "running_under_virtualenv", "(", ")", "and", "not", "using_user_site", ":", "parts", ".", "extend", "(", "[", "user_option_part", ",", "\" or \"", ",", "permissions_part", ".", "lower", "(", ")", ",", "]", ")", "else", ":", "parts", ".", "append", "(", "permissions_part", ")", "parts", ".", "append", "(", "\".\\n\"", ")", "# Suggest the user to enable Long Paths if path length is", "# more than 260", "if", "(", "WINDOWS", "and", "error", ".", "errno", "==", "errno", ".", "ENOENT", "and", "error", ".", "filename", "and", "len", "(", "error", ".", "filename", ")", ">", "260", ")", ":", "parts", ".", "append", "(", "\"HINT: This error might have occurred since \"", "\"this system does not have Windows Long Path \"", "\"support enabled. You can find information on \"", "\"how to enable this at \"", "\"https://pip.pypa.io/warnings/enable-long-paths\\n\"", ")", "return", "\"\"", ".", "join", "(", "parts", ")", ".", "strip", "(", ")", "+", "\"\\n\"" ]
[ 702, 0 ]
[ 749, 40 ]
python
br
['br', 'lb', 'en']
False
AccessMixin.get_login_url
(self)
Override this method to override the login_url attribute.
Override this method to override the login_url attribute.
def get_login_url(self): """ Override this method to override the login_url attribute. """ login_url = self.login_url or settings.LOGIN_URL if not login_url: raise ImproperlyConfigured( '{0} is missing the login_url attribute. Define {0}.login_url, settings.LOGIN_URL, or override ' '{0}.get_login_url().'.format(self.__class__.__name__) ) return str(login_url)
[ "def", "get_login_url", "(", "self", ")", ":", "login_url", "=", "self", ".", "login_url", "or", "settings", ".", "LOGIN_URL", "if", "not", "login_url", ":", "raise", "ImproperlyConfigured", "(", "'{0} is missing the login_url attribute. Define {0}.login_url, settings.LOGIN_URL, or override '", "'{0}.get_login_url().'", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ")", ")", "return", "str", "(", "login_url", ")" ]
[ 19, 4 ]
[ 29, 29 ]
python
en
['en', 'error', 'th']
False
AccessMixin.get_permission_denied_message
(self)
Override this method to override the permission_denied_message attribute.
Override this method to override the permission_denied_message attribute.
def get_permission_denied_message(self): """ Override this method to override the permission_denied_message attribute. """ return self.permission_denied_message
[ "def", "get_permission_denied_message", "(", "self", ")", ":", "return", "self", ".", "permission_denied_message" ]
[ 31, 4 ]
[ 35, 45 ]
python
en
['en', 'error', 'th']
False
AccessMixin.get_redirect_field_name
(self)
Override this method to override the redirect_field_name attribute.
Override this method to override the redirect_field_name attribute.
def get_redirect_field_name(self): """ Override this method to override the redirect_field_name attribute. """ return self.redirect_field_name
[ "def", "get_redirect_field_name", "(", "self", ")", ":", "return", "self", ".", "redirect_field_name" ]
[ 37, 4 ]
[ 41, 39 ]
python
en
['en', 'error', 'th']
False
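The three AccessMixin entries above (get_login_url, get_permission_denied_message, get_redirect_field_name) are overridable hooks on Django's access mixins. A minimal usage sketch, assuming a project that sets the corresponding attributes on a LoginRequiredMixin view; the view name, template, and URLs are illustrative, not taken from the dataset:

from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView

class DashboardView(LoginRequiredMixin, TemplateView):
    # Hypothetical view; LoginRequiredMixin inherits the AccessMixin hooks above.
    template_name = "dashboard.html"           # assumed template
    login_url = "/accounts/login/"             # read by get_login_url()
    redirect_field_name = "return_to"          # read by get_redirect_field_name()
    permission_denied_message = "Please sign in first."  # read by get_permission_denied_message()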
PermissionRequiredMixin.get_permission_required
(self)
Override this method to override the permission_required attribute. Must return an iterable.
Override this method to override the permission_required attribute. Must return an iterable.
def get_permission_required(self): """ Override this method to override the permission_required attribute. Must return an iterable. """ if self.permission_required is None: raise ImproperlyConfigured( '{0} is missing the permission_required attribute. Define {0}.permission_required, or override ' '{0}.get_permission_required().'.format(self.__class__.__name__) ) if isinstance(self.permission_required, str): perms = (self.permission_required,) else: perms = self.permission_required return perms
[ "def", "get_permission_required", "(", "self", ")", ":", "if", "self", ".", "permission_required", "is", "None", ":", "raise", "ImproperlyConfigured", "(", "'{0} is missing the permission_required attribute. Define {0}.permission_required, or override '", "'{0}.get_permission_required().'", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ")", ")", "if", "isinstance", "(", "self", ".", "permission_required", ",", "str", ")", ":", "perms", "=", "(", "self", ".", "permission_required", ",", ")", "else", ":", "perms", "=", "self", ".", "permission_required", "return", "perms" ]
[ 77, 4 ]
[ 91, 20 ]
python
en
['en', 'error', 'th']
False
PermissionRequiredMixin.has_permission
(self)
Override this method to customize the way permissions are checked.
Override this method to customize the way permissions are checked.
def has_permission(self): """ Override this method to customize the way permissions are checked. """ perms = self.get_permission_required() return self.request.user.has_perms(perms)
[ "def", "has_permission", "(", "self", ")", ":", "perms", "=", "self", ".", "get_permission_required", "(", ")", "return", "self", ".", "request", ".", "user", ".", "has_perms", "(", "perms", ")" ]
[ 93, 4 ]
[ 98, 49 ]
python
en
['en', 'error', 'th']
False
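PermissionRequiredMixin.get_permission_required and has_permission (the two entries above) work together: the first normalizes permission_required to an iterable, the second hands it to request.user.has_perms(). A hedged sketch of a view using them; the app label, permission codename, and template are made up for illustration:

from django.contrib.auth.mixins import PermissionRequiredMixin
from django.views.generic import TemplateView

class ReportView(PermissionRequiredMixin, TemplateView):
    template_name = "reports/index.html"            # hypothetical template
    # May be a single string or an iterable of strings; a bare string is
    # wrapped in a one-element tuple by get_permission_required().
    permission_required = ("reports.view_report",)  # hypothetical permission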
UserPassesTestMixin.get_test_func
(self)
Override this method to use a different test_func method.
Override this method to use a different test_func method.
def get_test_func(self): """ Override this method to use a different test_func method. """ return self.test_func
[ "def", "get_test_func", "(", "self", ")", ":", "return", "self", ".", "test_func" ]
[ 117, 4 ]
[ 121, 29 ]
python
en
['en', 'error', 'th']
False
move_to_completion_bucket
(target_bucket, target_infix, **kwargs)
A utility method to move an object to a target location in GCS.
A utility method to move an object to a target location in GCS.
def move_to_completion_bucket(target_bucket, target_infix, **kwargs): """A utility method to move an object to a target location in GCS.""" # Here we establish a connection hook to GoogleCloudStorage. # Google Cloud Composer automatically provides a google_cloud_storage_default # connection id that is used by this hook. conn = gcs_hook.GoogleCloudStorageHook() # The external trigger (Google Cloud Function) that initiates this DAG # provides a dag_run.conf dictionary with event attributes that specify # the information about the GCS object that triggered this DAG. # We extract the bucket and object name from this dictionary. source_bucket = models.Variable.get('gcp_input_location') source_object = models.Variable.get('gcp_input_location')+'/usa_names.csv' completion_ds = kwargs['ds'] target_object = os.path.join(target_infix, completion_ds, source_object) logging.info('Copying %s to %s', os.path.join(source_bucket, source_object), os.path.join(target_bucket, target_object)) conn.copy(source_bucket, source_object, target_bucket, target_object) logging.info('Deleting %s', os.path.join(source_bucket, source_object)) conn.delete(source_bucket, source_object)
[ "def", "move_to_completion_bucket", "(", "target_bucket", ",", "target_infix", ",", "*", "*", "kwargs", ")", ":", "# Here we establish a connection hook to GoogleCloudStorage.", "# Google Cloud Composer automatically provides a google_cloud_storage_default", "# connection id that is used by this hook.", "conn", "=", "gcs_hook", ".", "GoogleCloudStorageHook", "(", ")", "# The external trigger (Google Cloud Function) that initiates this DAG", "# provides a dag_run.conf dictionary with event attributes that specify", "# the information about the GCS object that triggered this DAG.", "# We extract the bucket and object name from this dictionary.", "source_bucket", "=", "models", ".", "Variable", ".", "get", "(", "'gcp_input_location'", ")", "source_object", "=", "models", ".", "Variable", ".", "get", "(", "'gcp_input_location'", ")", "+", "'/usa_names.csv'", "completion_ds", "=", "kwargs", "[", "'ds'", "]", "target_object", "=", "os", ".", "path", ".", "join", "(", "target_infix", ",", "completion_ds", ",", "source_object", ")", "logging", ".", "info", "(", "'Copying %s to %s'", ",", "os", ".", "path", ".", "join", "(", "source_bucket", ",", "source_object", ")", ",", "os", ".", "path", ".", "join", "(", "target_bucket", ",", "target_object", ")", ")", "conn", ".", "copy", "(", "source_bucket", ",", "source_object", ",", "target_bucket", ",", "target_object", ")", "logging", ".", "info", "(", "'Deleting %s'", ",", "os", ".", "path", ".", "join", "(", "source_bucket", ",", "source_object", ")", ")", "conn", ".", "delete", "(", "source_bucket", ",", "source_object", ")" ]
[ 79, 0 ]
[ 103, 45 ]
python
en
['en', 'en', 'en']
True
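move_to_completion_bucket is written as an Airflow callable: it takes the destination bucket and prefix positionally and reads the execution date from kwargs['ds']. A hedged sketch of invoking it directly; the bucket name, prefix, and date stamp are placeholders, and the call still assumes the Airflow Variables and GCS connection referenced inside the function exist:

# Placeholder arguments; in a real DAG, Airflow supplies ds via the task context.
move_to_completion_bucket(
    target_bucket="my-completion-bucket",  # assumed destination bucket
    target_infix="processed",              # assumed path prefix inside the bucket
    ds="2021-01-01",                       # execution-date stamp normally provided by Airflow
)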
Marker.evaluate
(self, environment=None)
Evaluate a marker. Return the boolean from evaluating the given marker against the environment. environment is an optional argument to override all or part of the determined environment. The environment is determined from the current Python process.
Evaluate a marker.
def evaluate(self, environment=None): """Evaluate a marker. Return the boolean from evaluating the given marker against the environment. environment is an optional argument to override all or part of the determined environment. The environment is determined from the current Python process. """ current_environment = default_environment() if environment is not None: current_environment.update(environment) return _evaluate_markers(self._markers, current_environment)
[ "def", "evaluate", "(", "self", ",", "environment", "=", "None", ")", ":", "current_environment", "=", "default_environment", "(", ")", "if", "environment", "is", "not", "None", ":", "current_environment", ".", "update", "(", "environment", ")", "return", "_evaluate_markers", "(", "self", ".", "_markers", ",", "current_environment", ")" ]
[ 287, 4 ]
[ 300, 68 ]
python
en
['en', 'en', 'en']
True
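Marker.evaluate above belongs to the packaging library's environment-marker support. A short sketch of how it is typically called; the marker expression itself is chosen only for illustration:

from packaging.markers import Marker

marker = Marker('python_version >= "3.8" and sys_platform != "win32"')

# With no argument, evaluate() uses default_environment(), i.e. the running interpreter.
print(marker.evaluate())

# A dict argument overrides part of the determined environment, as the docstring describes.
print(marker.evaluate({"sys_platform": "win32"}))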
validate_twilio_request
(f)
Validates that incoming requests genuinely originated from Twilio
Validates that incoming requests genuinely originated from Twilio
def validate_twilio_request(f): """Validates that incoming requests genuinely originated from Twilio""" @wraps(f) def decorated_function(*args, **kwargs): # Create an instance of the RequestValidator class validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN')) # Validate the request using its URL, POST data, # and X-TWILIO-SIGNATURE header request_valid = validator.validate( request.url, request.form, request.headers.get('X-TWILIO-SIGNATURE', '')) # Continue processing the request if it's valid (or if DEBUG is True) # and return a 403 error if it's not if request_valid or current_app.debug: return f(*args, **kwargs) else: return abort(403) return decorated_function
[ "def", "validate_twilio_request", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "decorated_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Create an instance of the RequestValidator class", "validator", "=", "RequestValidator", "(", "os", ".", "environ", ".", "get", "(", "'TWILIO_AUTH_TOKEN'", ")", ")", "# Validate the request using its URL, POST data,", "# and X-TWILIO-SIGNATURE header", "request_valid", "=", "validator", ".", "validate", "(", "request", ".", "url", ",", "request", ".", "form", ",", "request", ".", "headers", ".", "get", "(", "'X-TWILIO-SIGNATURE'", ",", "''", ")", ")", "# Continue processing the request if it's valid (or if DEBUG is True)", "# and return a 403 error if it's not", "if", "request_valid", "or", "current_app", ".", "debug", ":", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "abort", "(", "403", ")", "return", "decorated_function" ]
[ 7, 0 ]
[ 27, 29 ]
python
en
['en', 'en', 'en']
True
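validate_twilio_request is a Flask view decorator; the sketch below shows the usual way it would wrap a webhook route. The route path and response body are illustrative, the decorator is the one defined in the entry above, and TWILIO_AUTH_TOKEN must be present in the environment:

from flask import Flask

app = Flask(__name__)

@app.route("/sms", methods=["POST"])   # hypothetical webhook route
@validate_twilio_request               # decorator from the entry above
def incoming_sms():
    # Reached only when the X-TWILIO-SIGNATURE check passes (or app.debug is True).
    return "<Response><Message>Thanks!</Message></Response>"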
BoxDef.rawdata_input_fn
(self, ref, ltg, griddef, ltgfcst = None)
Input function that yields example dicts for each box in grid.
Input function that yields example dicts for each box in grid.
def rawdata_input_fn(self, ref, ltg, griddef, ltgfcst = None): """Input function that yields example dicts for each box in grid.""" for cy, cx in self.get_prediction_grid_centers(ref): # restrict to grids where there is currently lightning in the area interesting = np.sum( ltg[cy - self.train_patch_radius:cy + self.train_patch_radius + 1, cx - self.train_patch_radius:cx + self.train_patch_radius + 1]) > 0.5 if interesting: label = (np.sum( ltgfcst[cy - self.label_patch_radius:cy + self.label_patch_radius + 1, cx - self.label_patch_radius:cx + self.label_patch_radius + 1]) > 0.5 if ltgfcst is not None else None) example = { 'cy': cy, 'cx': cx, 'lon': griddef.lons[cy][cx], 'lat': griddef.lats[cy][cx], 'ref_center': ref[cy][cx], 'ltg_center': ltg[cy][cx], 'ref_smallbox': ref[cy - self.label_patch_radius:cy + self.label_patch_radius + 1, cx - self.label_patch_radius:cx + self.label_patch_radius + 1], 'ref_bigbox': ref[cy - self.train_patch_radius:cy + self.train_patch_radius + 1, cx - self.train_patch_radius:cx + self.train_patch_radius + 1], 'ltg_smallbox': ltg[cy - self.label_patch_radius:cy + self.label_patch_radius + 1, cx - self.label_patch_radius:cx + self.label_patch_radius + 1], 'ltg_bigbox': ltg[cy - self.train_patch_radius:cy + self.train_patch_radius + 1, cx - self.train_patch_radius:cx + self.train_patch_radius + 1], 'has_ltg': label } yield example
[ "def", "rawdata_input_fn", "(", "self", ",", "ref", ",", "ltg", ",", "griddef", ",", "ltgfcst", "=", "None", ")", ":", "for", "cy", ",", "cx", "in", "self", ".", "get_prediction_grid_centers", "(", "ref", ")", ":", "# restrict to grids where there is currently lightning in the area", "interesting", "=", "np", ".", "sum", "(", "ltg", "[", "cy", "-", "self", ".", "train_patch_radius", ":", "cy", "+", "self", ".", "train_patch_radius", "+", "1", ",", "cx", "-", "self", ".", "train_patch_radius", ":", "cx", "+", "self", ".", "train_patch_radius", "+", "1", "]", ")", ">", "0.5", "if", "interesting", ":", "label", "=", "(", "np", ".", "sum", "(", "ltgfcst", "[", "cy", "-", "self", ".", "label_patch_radius", ":", "cy", "+", "self", ".", "label_patch_radius", "+", "1", ",", "cx", "-", "self", ".", "label_patch_radius", ":", "cx", "+", "self", ".", "label_patch_radius", "+", "1", "]", ")", ">", "0.5", "if", "ltgfcst", "is", "not", "None", "else", "None", ")", "example", "=", "{", "'cy'", ":", "cy", ",", "'cx'", ":", "cx", ",", "'lon'", ":", "griddef", ".", "lons", "[", "cy", "]", "[", "cx", "]", ",", "'lat'", ":", "griddef", ".", "lats", "[", "cy", "]", "[", "cx", "]", ",", "'ref_center'", ":", "ref", "[", "cy", "]", "[", "cx", "]", ",", "'ltg_center'", ":", "ltg", "[", "cy", "]", "[", "cx", "]", ",", "'ref_smallbox'", ":", "ref", "[", "cy", "-", "self", ".", "label_patch_radius", ":", "cy", "+", "self", ".", "label_patch_radius", "+", "1", ",", "cx", "-", "self", ".", "label_patch_radius", ":", "cx", "+", "self", ".", "label_patch_radius", "+", "1", "]", ",", "'ref_bigbox'", ":", "ref", "[", "cy", "-", "self", ".", "train_patch_radius", ":", "cy", "+", "self", ".", "train_patch_radius", "+", "1", ",", "cx", "-", "self", ".", "train_patch_radius", ":", "cx", "+", "self", ".", "train_patch_radius", "+", "1", "]", ",", "'ltg_smallbox'", ":", "ltg", "[", "cy", "-", "self", ".", "label_patch_radius", ":", "cy", "+", "self", ".", "label_patch_radius", "+", "1", ",", "cx", "-", "self", ".", "label_patch_radius", ":", "cx", "+", "self", ".", "label_patch_radius", "+", "1", "]", ",", "'ltg_bigbox'", ":", "ltg", "[", "cy", "-", "self", ".", "train_patch_radius", ":", "cy", "+", "self", ".", "train_patch_radius", "+", "1", ",", "cx", "-", "self", ".", "train_patch_radius", ":", "cx", "+", "self", ".", "train_patch_radius", "+", "1", "]", ",", "'has_ltg'", ":", "label", "}", "yield", "example" ]
[ 36, 2 ]
[ 76, 21 ]
python
en
['en', 'en', 'en']
True
SequenceLikelihood.get_num_categories
(cls)
:returns: The number of likelihood categories in the enum.
:returns: The number of likelihood categories in the enum.
def get_num_categories(cls): """:returns: The number of likelihood categories in the enum.""" return 4
[ "def", "get_num_categories", "(", "cls", ")", ":", "return", "4" ]
[ 59, 4 ]
[ 61, 16 ]
python
en
['en', 'af', 'en']
True
Collector.add
(self, objs, source=None, nullable=False, reverse_dependency=False)
Add 'objs' to the collection of objects to be deleted. If the call is the result of a cascade, 'source' should be the model that caused it, and 'nullable' should be set to True if the relation can be null. Return a list of all objects that were not already collected.
Add 'objs' to the collection of objects to be deleted. If the call is the result of a cascade, 'source' should be the model that caused it, and 'nullable' should be set to True if the relation can be null.
def add(self, objs, source=None, nullable=False, reverse_dependency=False): """ Add 'objs' to the collection of objects to be deleted. If the call is the result of a cascade, 'source' should be the model that caused it, and 'nullable' should be set to True if the relation can be null. Return a list of all objects that were not already collected. """ if not objs: return [] new_objs = [] model = objs[0].__class__ instances = self.data[model] for obj in objs: if obj not in instances: new_objs.append(obj) instances.update(new_objs) # Nullable relationships can be ignored -- they are nulled out before # deleting, and therefore do not affect the order in which objects have # to be deleted. if source is not None and not nullable: self.add_dependency(source, model, reverse_dependency=reverse_dependency) return new_objs
[ "def", "add", "(", "self", ",", "objs", ",", "source", "=", "None", ",", "nullable", "=", "False", ",", "reverse_dependency", "=", "False", ")", ":", "if", "not", "objs", ":", "return", "[", "]", "new_objs", "=", "[", "]", "model", "=", "objs", "[", "0", "]", ".", "__class__", "instances", "=", "self", ".", "data", "[", "model", "]", "for", "obj", "in", "objs", ":", "if", "obj", "not", "in", "instances", ":", "new_objs", ".", "append", "(", "obj", ")", "instances", ".", "update", "(", "new_objs", ")", "# Nullable relationships can be ignored -- they are nulled out before", "# deleting, and therefore do not affect the order in which objects have", "# to be deleted.", "if", "source", "is", "not", "None", "and", "not", "nullable", ":", "self", ".", "add_dependency", "(", "source", ",", "model", ",", "reverse_dependency", "=", "reverse_dependency", ")", "return", "new_objs" ]
[ 98, 4 ]
[ 120, 23 ]
python
en
['en', 'error', 'th']
False
Collector.add_field_update
(self, field, value, objs)
Schedule a field update. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet).
Schedule a field update. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet).
def add_field_update(self, field, value, objs): """ Schedule a field update. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). """ if not objs: return model = objs[0].__class__ self.field_updates[model][field, value].update(objs)
[ "def", "add_field_update", "(", "self", ",", "field", ",", "value", ",", "objs", ")", ":", "if", "not", "objs", ":", "return", "model", "=", "objs", "[", "0", "]", ".", "__class__", "self", ".", "field_updates", "[", "model", "]", "[", "field", ",", "value", "]", ".", "update", "(", "objs", ")" ]
[ 128, 4 ]
[ 136, 60 ]
python
en
['en', 'error', 'th']
False
Collector.can_fast_delete
(self, objs, from_field=None)
Determine if the objects in the given queryset-like or single object can be fast-deleted. This can be done if there are no cascades, no parents and no signal listeners for the object class. The 'from_field' tells where we are coming from - we need this to determine if the objects are in fact to be deleted. Allow also skipping parent -> child -> parent chain preventing fast delete of the child.
Determine if the objects in the given queryset-like or single object can be fast-deleted. This can be done if there are no cascades, no parents and no signal listeners for the object class.
def can_fast_delete(self, objs, from_field=None): """ Determine if the objects in the given queryset-like or single object can be fast-deleted. This can be done if there are no cascades, no parents and no signal listeners for the object class. The 'from_field' tells where we are coming from - we need this to determine if the objects are in fact to be deleted. Allow also skipping parent -> child -> parent chain preventing fast delete of the child. """ if from_field and from_field.remote_field.on_delete is not CASCADE: return False if hasattr(objs, '_meta'): model = objs._meta.model elif hasattr(objs, 'model') and hasattr(objs, '_raw_delete'): model = objs.model else: return False if self._has_signal_listeners(model): return False # The use of from_field comes from the need to avoid cascade back to # parent when parent delete is cascading to child. opts = model._meta return ( all(link == from_field for link in opts.concrete_model._meta.parents.values()) and # Foreign keys pointing to this model. all( related.field.remote_field.on_delete is DO_NOTHING for related in get_candidate_relations_to_delete(opts) ) and ( # Something like generic foreign key. not any(hasattr(field, 'bulk_related_objects') for field in opts.private_fields) ) )
[ "def", "can_fast_delete", "(", "self", ",", "objs", ",", "from_field", "=", "None", ")", ":", "if", "from_field", "and", "from_field", ".", "remote_field", ".", "on_delete", "is", "not", "CASCADE", ":", "return", "False", "if", "hasattr", "(", "objs", ",", "'_meta'", ")", ":", "model", "=", "objs", ".", "_meta", ".", "model", "elif", "hasattr", "(", "objs", ",", "'model'", ")", "and", "hasattr", "(", "objs", ",", "'_raw_delete'", ")", ":", "model", "=", "objs", ".", "model", "else", ":", "return", "False", "if", "self", ".", "_has_signal_listeners", "(", "model", ")", ":", "return", "False", "# The use of from_field comes from the need to avoid cascade back to", "# parent when parent delete is cascading to child.", "opts", "=", "model", ".", "_meta", "return", "(", "all", "(", "link", "==", "from_field", "for", "link", "in", "opts", ".", "concrete_model", ".", "_meta", ".", "parents", ".", "values", "(", ")", ")", "and", "# Foreign keys pointing to this model.", "all", "(", "related", ".", "field", ".", "remote_field", ".", "on_delete", "is", "DO_NOTHING", "for", "related", "in", "get_candidate_relations_to_delete", "(", "opts", ")", ")", "and", "(", "# Something like generic foreign key.", "not", "any", "(", "hasattr", "(", "field", ",", "'bulk_related_objects'", ")", "for", "field", "in", "opts", ".", "private_fields", ")", ")", ")" ]
[ 164, 4 ]
[ 198, 9 ]
python
en
['en', 'error', 'th']
False
Collector.get_del_batches
(self, objs, fields)
Return the objs in suitably sized batches for the used connection.
Return the objs in suitably sized batches for the used connection.
def get_del_batches(self, objs, fields): """ Return the objs in suitably sized batches for the used connection. """ field_names = [field.name for field in fields] conn_batch_size = max( connections[self.using].ops.bulk_batch_size(field_names, objs), 1) if len(objs) > conn_batch_size: return [objs[i:i + conn_batch_size] for i in range(0, len(objs), conn_batch_size)] else: return [objs]
[ "def", "get_del_batches", "(", "self", ",", "objs", ",", "fields", ")", ":", "field_names", "=", "[", "field", ".", "name", "for", "field", "in", "fields", "]", "conn_batch_size", "=", "max", "(", "connections", "[", "self", ".", "using", "]", ".", "ops", ".", "bulk_batch_size", "(", "field_names", ",", "objs", ")", ",", "1", ")", "if", "len", "(", "objs", ")", ">", "conn_batch_size", ":", "return", "[", "objs", "[", "i", ":", "i", "+", "conn_batch_size", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "objs", ")", ",", "conn_batch_size", ")", "]", "else", ":", "return", "[", "objs", "]" ]
[ 200, 4 ]
[ 211, 25 ]
python
en
['en', 'error', 'th']
False
Collector.collect
(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False, fail_on_restricted=True)
Add 'objs' to the collection of objects to be deleted as well as all parent instances. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). If 'collect_related' is True, related objects will be handled by their respective on_delete handler. If the call is the result of a cascade, 'source' should be the model that caused it and 'nullable' should be set to True, if the relation can be null. If 'reverse_dependency' is True, 'source' will be deleted before the current model, rather than after. (Needed for cascading to parent models, the one case in which the cascade follows the forwards direction of an FK rather than the reverse direction.) If 'keep_parents' is True, data of parent model's will be not deleted. If 'fail_on_restricted' is False, error won't be raised even if it's prohibited to delete such objects due to RESTRICT, that defers restricted object checking in recursive calls where the top-level call may need to collect more objects to determine whether restricted ones can be deleted.
Add 'objs' to the collection of objects to be deleted as well as all parent instances. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). If 'collect_related' is True, related objects will be handled by their respective on_delete handler.
def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False, fail_on_restricted=True): """ Add 'objs' to the collection of objects to be deleted as well as all parent instances. 'objs' must be a homogeneous iterable collection of model instances (e.g. a QuerySet). If 'collect_related' is True, related objects will be handled by their respective on_delete handler. If the call is the result of a cascade, 'source' should be the model that caused it and 'nullable' should be set to True, if the relation can be null. If 'reverse_dependency' is True, 'source' will be deleted before the current model, rather than after. (Needed for cascading to parent models, the one case in which the cascade follows the forwards direction of an FK rather than the reverse direction.) If 'keep_parents' is True, data of parent model's will be not deleted. If 'fail_on_restricted' is False, error won't be raised even if it's prohibited to delete such objects due to RESTRICT, that defers restricted object checking in recursive calls where the top-level call may need to collect more objects to determine whether restricted ones can be deleted. """ if self.can_fast_delete(objs): self.fast_deletes.append(objs) return new_objs = self.add(objs, source, nullable, reverse_dependency=reverse_dependency) if not new_objs: return model = new_objs[0].__class__ if not keep_parents: # Recursively collect concrete model's parent models, but not their # related objects. These will be found by meta.get_fields() concrete_model = model._meta.concrete_model for ptr in concrete_model._meta.parents.values(): if ptr: parent_objs = [getattr(obj, ptr.name) for obj in new_objs] self.collect(parent_objs, source=model, source_attr=ptr.remote_field.related_name, collect_related=False, reverse_dependency=True, fail_on_restricted=False) if not collect_related: return if keep_parents: parents = set(model._meta.get_parent_list()) model_fast_deletes = defaultdict(list) protected_objects = defaultdict(list) for related in get_candidate_relations_to_delete(model._meta): # Preserve parent reverse relationships if keep_parents=True. if keep_parents and related.model in parents: continue field = related.field if field.remote_field.on_delete == DO_NOTHING: continue related_model = related.related_model if self.can_fast_delete(related_model, from_field=field): model_fast_deletes[related_model].append(field) continue batches = self.get_del_batches(new_objs, [field]) for batch in batches: sub_objs = self.related_objects(related_model, [field], batch) # Non-referenced fields can be deferred if no signal receivers # are connected for the related model as they'll never be # exposed to the user. Skip field deferring when some # relationships are select_related as interactions between both # features are hard to get right. This should only happen in # the rare cases where .related_objects is overridden anyway. if not (sub_objs.query.select_related or self._has_signal_listeners(related_model)): referenced_fields = set(chain.from_iterable( (rf.attname for rf in rel.field.foreign_related_fields) for rel in get_candidate_relations_to_delete(related_model._meta) )) sub_objs = sub_objs.only(*tuple(referenced_fields)) if sub_objs: try: field.remote_field.on_delete(self, field, sub_objs, self.using) except ProtectedError as error: key = "'%s.%s'" % (field.model.__name__, field.name) protected_objects[key] += error.protected_objects if protected_objects: raise ProtectedError( 'Cannot delete some instances of model %r because they are ' 'referenced through protected foreign keys: %s.' % ( model.__name__, ', '.join(protected_objects), ), set(chain.from_iterable(protected_objects.values())), ) for related_model, related_fields in model_fast_deletes.items(): batches = self.get_del_batches(new_objs, related_fields) for batch in batches: sub_objs = self.related_objects(related_model, related_fields, batch) self.fast_deletes.append(sub_objs) for field in model._meta.private_fields: if hasattr(field, 'bulk_related_objects'): # It's something like generic foreign key. sub_objs = field.bulk_related_objects(new_objs, self.using) self.collect(sub_objs, source=model, nullable=True, fail_on_restricted=False) if fail_on_restricted: # Raise an error if collected restricted objects (RESTRICT) aren't # candidates for deletion also collected via CASCADE. for related_model, instances in self.data.items(): self.clear_restricted_objects_from_set(related_model, instances) for qs in self.fast_deletes: self.clear_restricted_objects_from_queryset(qs.model, qs) if self.restricted_objects.values(): restricted_objects = defaultdict(list) for related_model, fields in self.restricted_objects.items(): for field, objs in fields.items(): if objs: key = "'%s.%s'" % (related_model.__name__, field.name) restricted_objects[key] += objs if restricted_objects: raise RestrictedError( 'Cannot delete some instances of model %r because ' 'they are referenced through restricted foreign keys: ' '%s.' % ( model.__name__, ', '.join(restricted_objects), ), set(chain.from_iterable(restricted_objects.values())), )
[ "def", "collect", "(", "self", ",", "objs", ",", "source", "=", "None", ",", "nullable", "=", "False", ",", "collect_related", "=", "True", ",", "source_attr", "=", "None", ",", "reverse_dependency", "=", "False", ",", "keep_parents", "=", "False", ",", "fail_on_restricted", "=", "True", ")", ":", "if", "self", ".", "can_fast_delete", "(", "objs", ")", ":", "self", ".", "fast_deletes", ".", "append", "(", "objs", ")", "return", "new_objs", "=", "self", ".", "add", "(", "objs", ",", "source", ",", "nullable", ",", "reverse_dependency", "=", "reverse_dependency", ")", "if", "not", "new_objs", ":", "return", "model", "=", "new_objs", "[", "0", "]", ".", "__class__", "if", "not", "keep_parents", ":", "# Recursively collect concrete model's parent models, but not their", "# related objects. These will be found by meta.get_fields()", "concrete_model", "=", "model", ".", "_meta", ".", "concrete_model", "for", "ptr", "in", "concrete_model", ".", "_meta", ".", "parents", ".", "values", "(", ")", ":", "if", "ptr", ":", "parent_objs", "=", "[", "getattr", "(", "obj", ",", "ptr", ".", "name", ")", "for", "obj", "in", "new_objs", "]", "self", ".", "collect", "(", "parent_objs", ",", "source", "=", "model", ",", "source_attr", "=", "ptr", ".", "remote_field", ".", "related_name", ",", "collect_related", "=", "False", ",", "reverse_dependency", "=", "True", ",", "fail_on_restricted", "=", "False", ")", "if", "not", "collect_related", ":", "return", "if", "keep_parents", ":", "parents", "=", "set", "(", "model", ".", "_meta", ".", "get_parent_list", "(", ")", ")", "model_fast_deletes", "=", "defaultdict", "(", "list", ")", "protected_objects", "=", "defaultdict", "(", "list", ")", "for", "related", "in", "get_candidate_relations_to_delete", "(", "model", ".", "_meta", ")", ":", "# Preserve parent reverse relationships if keep_parents=True.", "if", "keep_parents", "and", "related", ".", "model", "in", "parents", ":", "continue", "field", "=", "related", ".", "field", "if", "field", ".", "remote_field", ".", "on_delete", "==", "DO_NOTHING", ":", "continue", "related_model", "=", "related", ".", "related_model", "if", "self", ".", "can_fast_delete", "(", "related_model", ",", "from_field", "=", "field", ")", ":", "model_fast_deletes", "[", "related_model", "]", ".", "append", "(", "field", ")", "continue", "batches", "=", "self", ".", "get_del_batches", "(", "new_objs", ",", "[", "field", "]", ")", "for", "batch", "in", "batches", ":", "sub_objs", "=", "self", ".", "related_objects", "(", "related_model", ",", "[", "field", "]", ",", "batch", ")", "# Non-referenced fields can be deferred if no signal receivers", "# are connected for the related model as they'll never be", "# exposed to the user. Skip field deferring when some", "# relationships are select_related as interactions between both", "# features are hard to get right. This should only happen in", "# the rare cases where .related_objects is overridden anyway.", "if", "not", "(", "sub_objs", ".", "query", ".", "select_related", "or", "self", ".", "_has_signal_listeners", "(", "related_model", ")", ")", ":", "referenced_fields", "=", "set", "(", "chain", ".", "from_iterable", "(", "(", "rf", ".", "attname", "for", "rf", "in", "rel", ".", "field", ".", "foreign_related_fields", ")", "for", "rel", "in", "get_candidate_relations_to_delete", "(", "related_model", ".", "_meta", ")", ")", ")", "sub_objs", "=", "sub_objs", ".", "only", "(", "*", "tuple", "(", "referenced_fields", ")", ")", "if", "sub_objs", ":", "try", ":", "field", ".", "remote_field", ".", "on_delete", "(", "self", ",", "field", ",", "sub_objs", ",", "self", ".", "using", ")", "except", "ProtectedError", "as", "error", ":", "key", "=", "\"'%s.%s'\"", "%", "(", "field", ".", "model", ".", "__name__", ",", "field", ".", "name", ")", "protected_objects", "[", "key", "]", "+=", "error", ".", "protected_objects", "if", "protected_objects", ":", "raise", "ProtectedError", "(", "'Cannot delete some instances of model %r because they are '", "'referenced through protected foreign keys: %s.'", "%", "(", "model", ".", "__name__", ",", "', '", ".", "join", "(", "protected_objects", ")", ",", ")", ",", "set", "(", "chain", ".", "from_iterable", "(", "protected_objects", ".", "values", "(", ")", ")", ")", ",", ")", "for", "related_model", ",", "related_fields", "in", "model_fast_deletes", ".", "items", "(", ")", ":", "batches", "=", "self", ".", "get_del_batches", "(", "new_objs", ",", "related_fields", ")", "for", "batch", "in", "batches", ":", "sub_objs", "=", "self", ".", "related_objects", "(", "related_model", ",", "related_fields", ",", "batch", ")", "self", ".", "fast_deletes", ".", "append", "(", "sub_objs", ")", "for", "field", "in", "model", ".", "_meta", ".", "private_fields", ":", "if", "hasattr", "(", "field", ",", "'bulk_related_objects'", ")", ":", "# It's something like generic foreign key.", "sub_objs", "=", "field", ".", "bulk_related_objects", "(", "new_objs", ",", "self", ".", "using", ")", "self", ".", "collect", "(", "sub_objs", ",", "source", "=", "model", ",", "nullable", "=", "True", ",", "fail_on_restricted", "=", "False", ")", "if", "fail_on_restricted", ":", "# Raise an error if collected restricted objects (RESTRICT) aren't", "# candidates for deletion also collected via CASCADE.", "for", "related_model", ",", "instances", "in", "self", ".", "data", ".", "items", "(", ")", ":", "self", ".", "clear_restricted_objects_from_set", "(", "related_model", ",", "instances", ")", "for", "qs", "in", "self", ".", "fast_deletes", ":", "self", ".", "clear_restricted_objects_from_queryset", "(", "qs", ".", "model", ",", "qs", ")", "if", "self", ".", "restricted_objects", ".", "values", "(", ")", ":", "restricted_objects", "=", "defaultdict", "(", "list", ")", "for", "related_model", ",", "fields", "in", "self", ".", "restricted_objects", ".", "items", "(", ")", ":", "for", "field", ",", "objs", "in", "fields", ".", "items", "(", ")", ":", "if", "objs", ":", "key", "=", "\"'%s.%s'\"", "%", "(", "related_model", ".", "__name__", ",", "field", ".", "name", ")", "restricted_objects", "[", "key", "]", "+=", "objs", "if", "restricted_objects", ":", "raise", "RestrictedError", "(", "'Cannot delete some instances of model %r because '", "'they are referenced through restricted foreign keys: '", "'%s.'", "%", "(", "model", ".", "__name__", ",", "', '", ".", "join", "(", "restricted_objects", ")", ",", ")", ",", "set", "(", "chain", ".", "from_iterable", "(", "restricted_objects", ".", "values", "(", ")", ")", ")", ",", ")" ]
[ 213, 4 ]
[ 343, 21 ]
python
en
['en', 'error', 'th']
False
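The Collector entries above and below are Django's internal cascade-deletion machinery; in normal code Model.delete() and QuerySet.delete() drive it for you. A hedged sketch of using it directly, with the database alias left at Django's default:

from django.db.models.deletion import Collector

def cascade_delete(objs, using="default"):
    # Gather objs plus everything reachable through cascading foreign keys,
    # then delete the whole graph in dependency order.
    collector = Collector(using=using)
    collector.collect(objs)        # the collect() shown above
    return collector.delete()      # (total_deleted, {model_label: count, ...})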
Collector.related_objects
(self, related_model, related_fields, objs)
Get a QuerySet of the related model to objs via related fields.
Get a QuerySet of the related model to objs via related fields.
def related_objects(self, related_model, related_fields, objs): """ Get a QuerySet of the related model to objs via related fields. """ predicate = reduce(operator.or_, ( query_utils.Q(**{'%s__in' % related_field.name: objs}) for related_field in related_fields )) return related_model._base_manager.using(self.using).filter(predicate)
[ "def", "related_objects", "(", "self", ",", "related_model", ",", "related_fields", ",", "objs", ")", ":", "predicate", "=", "reduce", "(", "operator", ".", "or_", ",", "(", "query_utils", ".", "Q", "(", "*", "*", "{", "'%s__in'", "%", "related_field", ".", "name", ":", "objs", "}", ")", "for", "related_field", "in", "related_fields", ")", ")", "return", "related_model", ".", "_base_manager", ".", "using", "(", "self", ".", "using", ")", ".", "filter", "(", "predicate", ")" ]
[ 345, 4 ]
[ 353, 78 ]
python
en
['en', 'error', 'th']
False
Request.max_content_length
(self)
Read-only view of the ``MAX_CONTENT_LENGTH`` config key.
Read-only view of the ``MAX_CONTENT_LENGTH`` config key.
def max_content_length(self): """Read-only view of the ``MAX_CONTENT_LENGTH`` config key.""" ctx = _request_ctx_stack.top if ctx is not None: return ctx.app.config['MAX_CONTENT_LENGTH']
[ "def", "max_content_length", "(", "self", ")", ":", "ctx", "=", "_request_ctx_stack", ".", "top", "if", "ctx", "is", "not", "None", ":", "return", "ctx", ".", "app", ".", "config", "[", "'MAX_CONTENT_LENGTH'", "]" ]
[ 62, 4 ]
[ 66, 55 ]
python
en
['en', 'en', 'en']
True
Request.endpoint
(self)
The endpoint that matched the request. This in combination with :attr:`view_args` can be used to reconstruct the same or a modified URL. If an exception happened when matching, this will be ``None``.
The endpoint that matched the request. This in combination with :attr:`view_args` can be used to reconstruct the same or a modified URL. If an exception happened when matching, this will be ``None``.
def endpoint(self): """The endpoint that matched the request. This in combination with :attr:`view_args` can be used to reconstruct the same or a modified URL. If an exception happened when matching, this will be ``None``. """ if self.url_rule is not None: return self.url_rule.endpoint
[ "def", "endpoint", "(", "self", ")", ":", "if", "self", ".", "url_rule", "is", "not", "None", ":", "return", "self", ".", "url_rule", ".", "endpoint" ]
[ 69, 4 ]
[ 76, 41 ]
python
en
['en', 'en', 'en']
True
Request.module
(self)
The name of the current module if the request was dispatched to an actual module. This is deprecated functionality, use blueprints instead.
The name of the current module if the request was dispatched to an actual module. This is deprecated functionality, use blueprints instead.
def module(self): """The name of the current module if the request was dispatched to an actual module. This is deprecated functionality, use blueprints instead. """ from warnings import warn warn(DeprecationWarning('modules were deprecated in favor of ' 'blueprints. Use request.blueprint ' 'instead.'), stacklevel=2) if self._is_old_module: return self.blueprint
[ "def", "module", "(", "self", ")", ":", "from", "warnings", "import", "warn", "warn", "(", "DeprecationWarning", "(", "'modules were deprecated in favor of '", "'blueprints. Use request.blueprint '", "'instead.'", ")", ",", "stacklevel", "=", "2", ")", "if", "self", ".", "_is_old_module", ":", "return", "self", ".", "blueprint" ]
[ 79, 4 ]
[ 89, 33 ]
python
en
['en', 'en', 'en']
True
Request.blueprint
(self)
The name of the current blueprint
The name of the current blueprint
def blueprint(self): """The name of the current blueprint""" if self.url_rule and '.' in self.url_rule.endpoint: return self.url_rule.endpoint.rsplit('.', 1)[0]
[ "def", "blueprint", "(", "self", ")", ":", "if", "self", ".", "url_rule", "and", "'.'", "in", "self", ".", "url_rule", ".", "endpoint", ":", "return", "self", ".", "url_rule", ".", "endpoint", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]" ]
[ 92, 4 ]
[ 95, 59 ]
python
en
['en', 'en', 'en']
True
Request.json
(self)
If the mimetype is :mimetype:`application/json` this will contain the parsed JSON data. Otherwise this will be ``None``. The :meth:`get_json` method should be used instead.
If the mimetype is :mimetype:`application/json` this will contain the parsed JSON data. Otherwise this will be ``None``.
def json(self): """If the mimetype is :mimetype:`application/json` this will contain the parsed JSON data. Otherwise this will be ``None``. The :meth:`get_json` method should be used instead. """ from warnings import warn warn(DeprecationWarning('json is deprecated. ' 'Use get_json() instead.'), stacklevel=2) return self.get_json()
[ "def", "json", "(", "self", ")", ":", "from", "warnings", "import", "warn", "warn", "(", "DeprecationWarning", "(", "'json is deprecated. '", "'Use get_json() instead.'", ")", ",", "stacklevel", "=", "2", ")", "return", "self", ".", "get_json", "(", ")" ]
[ 98, 4 ]
[ 107, 30 ]
python
en
['en', 'en', 'en']
True
Request.is_json
(self)
Indicates if this request is JSON or not. By default a request is considered to include JSON data if the mimetype is :mimetype:`application/json` or :mimetype:`application/*+json`. .. versionadded:: 0.11
Indicates if this request is JSON or not. By default a request is considered to include JSON data if the mimetype is :mimetype:`application/json` or :mimetype:`application/*+json`.
def is_json(self): """Indicates if this request is JSON or not. By default a request is considered to include JSON data if the mimetype is :mimetype:`application/json` or :mimetype:`application/*+json`. .. versionadded:: 0.11 """ mt = self.mimetype if mt == 'application/json': return True if mt.startswith('application/') and mt.endswith('+json'): return True return False
[ "def", "is_json", "(", "self", ")", ":", "mt", "=", "self", ".", "mimetype", "if", "mt", "==", "'application/json'", ":", "return", "True", "if", "mt", ".", "startswith", "(", "'application/'", ")", "and", "mt", ".", "endswith", "(", "'+json'", ")", ":", "return", "True", "return", "False" ]
[ 110, 4 ]
[ 122, 20 ]
python
en
['en', 'en', 'en']
True
Request.get_json
(self, force=False, silent=False, cache=True)
Parses the incoming JSON request data and returns it. By default this function will return ``None`` if the mimetype is not :mimetype:`application/json` but this can be overridden by the ``force`` parameter. If parsing fails the :meth:`on_json_loading_failed` method on the request object will be invoked. :param force: if set to ``True`` the mimetype is ignored. :param silent: if set to ``True`` this method will fail silently and return ``None``. :param cache: if set to ``True`` the parsed JSON data is remembered on the request.
Parses the incoming JSON request data and returns it. By default this function will return ``None`` if the mimetype is not :mimetype:`application/json` but this can be overridden by the ``force`` parameter. If parsing fails the :meth:`on_json_loading_failed` method on the request object will be invoked.
def get_json(self, force=False, silent=False, cache=True): """Parses the incoming JSON request data and returns it. By default this function will return ``None`` if the mimetype is not :mimetype:`application/json` but this can be overridden by the ``force`` parameter. If parsing fails the :meth:`on_json_loading_failed` method on the request object will be invoked. :param force: if set to ``True`` the mimetype is ignored. :param silent: if set to ``True`` this method will fail silently and return ``None``. :param cache: if set to ``True`` the parsed JSON data is remembered on the request. """ rv = getattr(self, '_cached_json', _missing) # We return cached JSON only when the cache is enabled. if cache and rv is not _missing: return rv if not (force or self.is_json): return None # We accept a request charset against the specification as # certain clients have been using this in the past. This # fits our general approach of being nice in what we accept # and strict in what we send out. request_charset = self.mimetype_params.get('charset') try: data = _get_data(self, cache) if request_charset is not None: rv = json.loads(data, encoding=request_charset) else: rv = json.loads(data) except ValueError as e: if silent: rv = None else: rv = self.on_json_loading_failed(e) if cache: self._cached_json = rv return rv
[ "def", "get_json", "(", "self", ",", "force", "=", "False", ",", "silent", "=", "False", ",", "cache", "=", "True", ")", ":", "rv", "=", "getattr", "(", "self", ",", "'_cached_json'", ",", "_missing", ")", "# We return cached JSON only when the cache is enabled.", "if", "cache", "and", "rv", "is", "not", "_missing", ":", "return", "rv", "if", "not", "(", "force", "or", "self", ".", "is_json", ")", ":", "return", "None", "# We accept a request charset against the specification as", "# certain clients have been using this in the past. This", "# fits our general approach of being nice in what we accept", "# and strict in what we send out.", "request_charset", "=", "self", ".", "mimetype_params", ".", "get", "(", "'charset'", ")", "try", ":", "data", "=", "_get_data", "(", "self", ",", "cache", ")", "if", "request_charset", "is", "not", "None", ":", "rv", "=", "json", ".", "loads", "(", "data", ",", "encoding", "=", "request_charset", ")", "else", ":", "rv", "=", "json", ".", "loads", "(", "data", ")", "except", "ValueError", "as", "e", ":", "if", "silent", ":", "rv", "=", "None", "else", ":", "rv", "=", "self", ".", "on_json_loading_failed", "(", "e", ")", "if", "cache", ":", "self", ".", "_cached_json", "=", "rv", "return", "rv" ]
[ 124, 4 ]
[ 164, 17 ]
python
en
['en', 'en', 'en']
True
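A short usage sketch for the get_json entry above, inside a Flask view; the endpoint and field names are illustrative:

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route("/echo", methods=["POST"])  # hypothetical endpoint
def echo():
    # silent=True mirrors the docstring: on a parse failure this returns None
    # instead of propagating the BadRequest raised by on_json_loading_failed().
    payload = request.get_json(silent=True)
    if payload is None:
        return jsonify(error="expected application/json"), 400
    return jsonify(received=payload)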
Request.on_json_loading_failed
(self, e)
Called if decoding of the JSON data failed. The return value of this method is used by :meth:`get_json` when an error occurred. The default implementation just raises a :class:`BadRequest` exception. .. versionchanged:: 0.10 Removed buggy previous behavior of generating a random JSON response. If you want that behavior back you can trivially add it by subclassing. .. versionadded:: 0.8
Called if decoding of the JSON data failed. The return value of this method is used by :meth:`get_json` when an error occurred. The default implementation just raises a :class:`BadRequest` exception.
def on_json_loading_failed(self, e): """Called if decoding of the JSON data failed. The return value of this method is used by :meth:`get_json` when an error occurred. The default implementation just raises a :class:`BadRequest` exception. .. versionchanged:: 0.10 Removed buggy previous behavior of generating a random JSON response. If you want that behavior back you can trivially add it by subclassing. .. versionadded:: 0.8 """ ctx = _request_ctx_stack.top if ctx is not None and ctx.app.config.get('DEBUG', False): raise BadRequest('Failed to decode JSON object: {0}'.format(e)) raise BadRequest()
[ "def", "on_json_loading_failed", "(", "self", ",", "e", ")", ":", "ctx", "=", "_request_ctx_stack", ".", "top", "if", "ctx", "is", "not", "None", "and", "ctx", ".", "app", ".", "config", ".", "get", "(", "'DEBUG'", ",", "False", ")", ":", "raise", "BadRequest", "(", "'Failed to decode JSON object: {0}'", ".", "format", "(", "e", ")", ")", "raise", "BadRequest", "(", ")" ]
[ 166, 4 ]
[ 181, 26 ]
python
en
['en', 'en', 'en']
True
UniversalDetector.reset
(self)
Reset the UniversalDetector and all of its probers back to their initial states. This is called by ``__init__``, so you only need to call this directly in between analyses of different documents.
Reset the UniversalDetector and all of its probers back to their initial states. This is called by ``__init__``, so you only need to call this directly in between analyses of different documents.
def reset(self): """ Reset the UniversalDetector and all of its probers back to their initial states. This is called by ``__init__``, so you only need to call this directly in between analyses of different documents. """ self.result = {'encoding': None, 'confidence': 0.0, 'language': None} self.done = False self._got_data = False self._has_win_bytes = False self._input_state = InputState.PURE_ASCII self._last_char = b'' if self._esc_charset_prober: self._esc_charset_prober.reset() for prober in self._charset_probers: prober.reset()
[ "def", "reset", "(", "self", ")", ":", "self", ".", "result", "=", "{", "'encoding'", ":", "None", ",", "'confidence'", ":", "0.0", ",", "'language'", ":", "None", "}", "self", ".", "done", "=", "False", "self", ".", "_got_data", "=", "False", "self", ".", "_has_win_bytes", "=", "False", "self", ".", "_input_state", "=", "InputState", ".", "PURE_ASCII", "self", ".", "_last_char", "=", "b''", "if", "self", ".", "_esc_charset_prober", ":", "self", ".", "_esc_charset_prober", ".", "reset", "(", ")", "for", "prober", "in", "self", ".", "_charset_probers", ":", "prober", ".", "reset", "(", ")" ]
[ 93, 4 ]
[ 108, 26 ]
python
en
['en', 'error', 'th']
False
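reset above and feed in the next entry form chardet's incremental detection loop. A minimal sketch of that workflow, assuming chardet is installed; the file path is a placeholder:

from chardet.universaldetector import UniversalDetector

def detect_encoding(path):
    detector = UniversalDetector()
    with open(path, "rb") as handle:  # path is a placeholder
        for chunk in iter(lambda: handle.read(4096), b""):
            detector.feed(chunk)      # feed() sets detector.done once it is confident
            if detector.done:
                break
    detector.close()                  # finalize; also needed if done never flipped
    return detector.result            # {'encoding': ..., 'confidence': ..., 'language': ...}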
UniversalDetector.feed
(self, byte_str)
Takes a chunk of a document and feeds it through all of the relevant charset probers. After calling ``feed``, you can check the value of the ``done`` attribute to see if you need to continue feeding the ``UniversalDetector`` more data, or if it has made a prediction (in the ``result`` attribute). .. note:: You should always call ``close`` when you're done feeding in your document if ``done`` is not already ``True``.
Takes a chunk of a document and feeds it through all of the relevant charset probers.
def feed(self, byte_str): """ Takes a chunk of a document and feeds it through all of the relevant charset probers. After calling ``feed``, you can check the value of the ``done`` attribute to see if you need to continue feeding the ``UniversalDetector`` more data, or if it has made a prediction (in the ``result`` attribute). .. note:: You should always call ``close`` when you're done feeding in your document if ``done`` is not already ``True``. """ if self.done: return if not len(byte_str): return if not isinstance(byte_str, bytearray): byte_str = bytearray(byte_str) # First check for known BOMs, since these are guaranteed to be correct if not self._got_data: # If the data starts with BOM, we know it is UTF if byte_str.startswith(codecs.BOM_UTF8): # EF BB BF UTF-8 with BOM self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0, 'language': ''} elif byte_str.startswith((codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)): # FF FE 00 00 UTF-32, little-endian BOM # 00 00 FE FF UTF-32, big-endian BOM self.result = {'encoding': "UTF-32", 'confidence': 1.0, 'language': ''} elif byte_str.startswith(b'\xFE\xFF\x00\x00'): # FE FF 00 00 UCS-4, unusual octet order BOM (3412) self.result = {'encoding': "X-ISO-10646-UCS-4-3412", 'confidence': 1.0, 'language': ''} elif byte_str.startswith(b'\x00\x00\xFF\xFE'): # 00 00 FF FE UCS-4, unusual octet order BOM (2143) self.result = {'encoding': "X-ISO-10646-UCS-4-2143", 'confidence': 1.0, 'language': ''} elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): # FF FE UTF-16, little endian BOM # FE FF UTF-16, big endian BOM self.result = {'encoding': "UTF-16", 'confidence': 1.0, 'language': ''} self._got_data = True if self.result['encoding'] is not None: self.done = True return # If none of those matched and we've only see ASCII so far, check # for high bytes and escape sequences if self._input_state == InputState.PURE_ASCII: if self.HIGH_BYTE_DETECTOR.search(byte_str): self._input_state = InputState.HIGH_BYTE elif self._input_state == InputState.PURE_ASCII and \ self.ESC_DETECTOR.search(self._last_char + byte_str): self._input_state = InputState.ESC_ASCII self._last_char = byte_str[-1:] # If we've seen escape sequences, use the EscCharSetProber, which # uses a simple state machine to check for known escape sequences in # HZ and ISO-2022 encodings, since those are the only encodings that # use such sequences. if self._input_state == InputState.ESC_ASCII: if not self._esc_charset_prober: self._esc_charset_prober = EscCharSetProber(self.lang_filter) if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = {'encoding': self._esc_charset_prober.charset_name, 'confidence': self._esc_charset_prober.get_confidence(), 'language': self._esc_charset_prober.language} self.done = True # If we've seen high bytes (i.e., those with values greater than 127), # we need to do more complicated checks using all our multi-byte and # single-byte probers that are left. The single-byte probers # use character bigram distributions to determine the encoding, whereas # the multi-byte probers use a combination of character unigram and # bigram distributions. elif self._input_state == InputState.HIGH_BYTE: if not self._charset_probers: self._charset_probers = [MBCSGroupProber(self.lang_filter)] # If we're checking non-CJK encodings, use single-byte prober if self.lang_filter & LanguageFilter.NON_CJK: self._charset_probers.append(SBCSGroupProber()) self._charset_probers.append(Latin1Prober()) for prober in self._charset_probers: if prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = {'encoding': prober.charset_name, 'confidence': prober.get_confidence(), 'language': prober.language} self.done = True break if self.WIN_BYTE_DETECTOR.search(byte_str): self._has_win_bytes = True
[ "def", "feed", "(", "self", ",", "byte_str", ")", ":", "if", "self", ".", "done", ":", "return", "if", "not", "len", "(", "byte_str", ")", ":", "return", "if", "not", "isinstance", "(", "byte_str", ",", "bytearray", ")", ":", "byte_str", "=", "bytearray", "(", "byte_str", ")", "# First check for known BOMs, since these are guaranteed to be correct", "if", "not", "self", ".", "_got_data", ":", "# If the data starts with BOM, we know it is UTF", "if", "byte_str", ".", "startswith", "(", "codecs", ".", "BOM_UTF8", ")", ":", "# EF BB BF UTF-8 with BOM", "self", ".", "result", "=", "{", "'encoding'", ":", "\"UTF-8-SIG\"", ",", "'confidence'", ":", "1.0", ",", "'language'", ":", "''", "}", "elif", "byte_str", ".", "startswith", "(", "(", "codecs", ".", "BOM_UTF32_LE", ",", "codecs", ".", "BOM_UTF32_BE", ")", ")", ":", "# FF FE 00 00 UTF-32, little-endian BOM", "# 00 00 FE FF UTF-32, big-endian BOM", "self", ".", "result", "=", "{", "'encoding'", ":", "\"UTF-32\"", ",", "'confidence'", ":", "1.0", ",", "'language'", ":", "''", "}", "elif", "byte_str", ".", "startswith", "(", "b'\\xFE\\xFF\\x00\\x00'", ")", ":", "# FE FF 00 00 UCS-4, unusual octet order BOM (3412)", "self", ".", "result", "=", "{", "'encoding'", ":", "\"X-ISO-10646-UCS-4-3412\"", ",", "'confidence'", ":", "1.0", ",", "'language'", ":", "''", "}", "elif", "byte_str", ".", "startswith", "(", "b'\\x00\\x00\\xFF\\xFE'", ")", ":", "# 00 00 FF FE UCS-4, unusual octet order BOM (2143)", "self", ".", "result", "=", "{", "'encoding'", ":", "\"X-ISO-10646-UCS-4-2143\"", ",", "'confidence'", ":", "1.0", ",", "'language'", ":", "''", "}", "elif", "byte_str", ".", "startswith", "(", "(", "codecs", ".", "BOM_LE", ",", "codecs", ".", "BOM_BE", ")", ")", ":", "# FF FE UTF-16, little endian BOM", "# FE FF UTF-16, big endian BOM", "self", ".", "result", "=", "{", "'encoding'", ":", "\"UTF-16\"", ",", "'confidence'", ":", "1.0", ",", "'language'", ":", "''", "}", "self", ".", "_got_data", "=", "True", "if", "self", ".", "result", "[", "'encoding'", "]", "is", "not", "None", ":", "self", ".", "done", "=", "True", "return", "# If none of those matched and we've only see ASCII so far, check", "# for high bytes and escape sequences", "if", "self", ".", "_input_state", "==", "InputState", ".", "PURE_ASCII", ":", "if", "self", ".", "HIGH_BYTE_DETECTOR", ".", "search", "(", "byte_str", ")", ":", "self", ".", "_input_state", "=", "InputState", ".", "HIGH_BYTE", "elif", "self", ".", "_input_state", "==", "InputState", ".", "PURE_ASCII", "and", "self", ".", "ESC_DETECTOR", ".", "search", "(", "self", ".", "_last_char", "+", "byte_str", ")", ":", "self", ".", "_input_state", "=", "InputState", ".", "ESC_ASCII", "self", ".", "_last_char", "=", "byte_str", "[", "-", "1", ":", "]", "# If we've seen escape sequences, use the EscCharSetProber, which", "# uses a simple state machine to check for known escape sequences in", "# HZ and ISO-2022 encodings, since those are the only encodings that", "# use such sequences.", "if", "self", ".", "_input_state", "==", "InputState", ".", "ESC_ASCII", ":", "if", "not", "self", ".", "_esc_charset_prober", ":", "self", ".", "_esc_charset_prober", "=", "EscCharSetProber", "(", "self", ".", "lang_filter", ")", "if", "self", ".", "_esc_charset_prober", ".", "feed", "(", "byte_str", ")", "==", "ProbingState", ".", "FOUND_IT", ":", "self", ".", "result", "=", "{", "'encoding'", ":", "self", ".", "_esc_charset_prober", ".", "charset_name", ",", "'confidence'", ":", "self", ".", "_esc_charset_prober", ".", "get_confidence", "(", ")", 
",", "'language'", ":", "self", ".", "_esc_charset_prober", ".", "language", "}", "self", ".", "done", "=", "True", "# If we've seen high bytes (i.e., those with values greater than 127),", "# we need to do more complicated checks using all our multi-byte and", "# single-byte probers that are left. The single-byte probers", "# use character bigram distributions to determine the encoding, whereas", "# the multi-byte probers use a combination of character unigram and", "# bigram distributions.", "elif", "self", ".", "_input_state", "==", "InputState", ".", "HIGH_BYTE", ":", "if", "not", "self", ".", "_charset_probers", ":", "self", ".", "_charset_probers", "=", "[", "MBCSGroupProber", "(", "self", ".", "lang_filter", ")", "]", "# If we're checking non-CJK encodings, use single-byte prober", "if", "self", ".", "lang_filter", "&", "LanguageFilter", ".", "NON_CJK", ":", "self", ".", "_charset_probers", ".", "append", "(", "SBCSGroupProber", "(", ")", ")", "self", ".", "_charset_probers", ".", "append", "(", "Latin1Prober", "(", ")", ")", "for", "prober", "in", "self", ".", "_charset_probers", ":", "if", "prober", ".", "feed", "(", "byte_str", ")", "==", "ProbingState", ".", "FOUND_IT", ":", "self", ".", "result", "=", "{", "'encoding'", ":", "prober", ".", "charset_name", ",", "'confidence'", ":", "prober", ".", "get_confidence", "(", ")", ",", "'language'", ":", "prober", ".", "language", "}", "self", ".", "done", "=", "True", "break", "if", "self", ".", "WIN_BYTE_DETECTOR", ".", "search", "(", "byte_str", ")", ":", "self", ".", "_has_win_bytes", "=", "True" ]
[ 110, 4 ]
[ 217, 42 ]
python
en
['en', 'error', 'th']
False
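A minimal sketch of the incremental API this record documents: ``feed`` chunks until ``done`` flips to ``True``, then finalize with ``close``. It assumes the ``chardet`` package is installed; the input file name is a hypothetical stand-in.

```python
from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open("unknown_encoding.txt", "rb") as handle:  # hypothetical input file
    for chunk in handle:
        detector.feed(chunk)       # each call may flip detector.done to True
        if detector.done:
            break
detector.close()                   # finalize the prediction
print(detector.result)             # {'encoding': ..., 'confidence': ..., 'language': ...}
```

Stopping the loop as soon as ``done`` is set is what makes this cheaper than reading the whole file for large inputs.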
UniversalDetector.close
(self)
Stop analyzing the current document and come up with a final prediction. :returns: The ``result`` attribute, a ``dict`` with the keys `encoding`, `confidence`, and `language`.
Stop analyzing the current document and come up with a final prediction.
def close(self): """ Stop analyzing the current document and come up with a final prediction. :returns: The ``result`` attribute, a ``dict`` with the keys `encoding`, `confidence`, and `language`. """ # Don't bother with checks if we're already done if self.done: return self.result self.done = True if not self._got_data: self.logger.debug('no data received!') # Default to ASCII if it is all we've seen so far elif self._input_state == InputState.PURE_ASCII: self.result = {'encoding': 'ascii', 'confidence': 1.0, 'language': ''} # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD elif self._input_state == InputState.HIGH_BYTE: prober_confidence = None max_prober_confidence = 0.0 max_prober = None for prober in self._charset_probers: if not prober: continue prober_confidence = prober.get_confidence() if prober_confidence > max_prober_confidence: max_prober_confidence = prober_confidence max_prober = prober if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): charset_name = max_prober.charset_name lower_charset_name = max_prober.charset_name.lower() confidence = max_prober.get_confidence() # Use Windows encoding name instead of ISO-8859 if we saw any # extra Windows-specific bytes if lower_charset_name.startswith('iso-8859'): if self._has_win_bytes: charset_name = self.ISO_WIN_MAP.get(lower_charset_name, charset_name) self.result = {'encoding': charset_name, 'confidence': confidence, 'language': max_prober.language} # Log all prober confidences if none met MINIMUM_THRESHOLD if self.logger.getEffectiveLevel() <= logging.DEBUG: if self.result['encoding'] is None: self.logger.debug('no probers hit minimum threshold') for group_prober in self._charset_probers: if not group_prober: continue if isinstance(group_prober, CharSetGroupProber): for prober in group_prober.probers: self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, prober.get_confidence()) else: self.logger.debug('%s %s confidence = %s', group_prober.charset_name, group_prober.language, group_prober.get_confidence()) return self.result
[ "def", "close", "(", "self", ")", ":", "# Don't bother with checks if we're already done", "if", "self", ".", "done", ":", "return", "self", ".", "result", "self", ".", "done", "=", "True", "if", "not", "self", ".", "_got_data", ":", "self", ".", "logger", ".", "debug", "(", "'no data received!'", ")", "# Default to ASCII if it is all we've seen so far", "elif", "self", ".", "_input_state", "==", "InputState", ".", "PURE_ASCII", ":", "self", ".", "result", "=", "{", "'encoding'", ":", "'ascii'", ",", "'confidence'", ":", "1.0", ",", "'language'", ":", "''", "}", "# If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD", "elif", "self", ".", "_input_state", "==", "InputState", ".", "HIGH_BYTE", ":", "prober_confidence", "=", "None", "max_prober_confidence", "=", "0.0", "max_prober", "=", "None", "for", "prober", "in", "self", ".", "_charset_probers", ":", "if", "not", "prober", ":", "continue", "prober_confidence", "=", "prober", ".", "get_confidence", "(", ")", "if", "prober_confidence", ">", "max_prober_confidence", ":", "max_prober_confidence", "=", "prober_confidence", "max_prober", "=", "prober", "if", "max_prober", "and", "(", "max_prober_confidence", ">", "self", ".", "MINIMUM_THRESHOLD", ")", ":", "charset_name", "=", "max_prober", ".", "charset_name", "lower_charset_name", "=", "max_prober", ".", "charset_name", ".", "lower", "(", ")", "confidence", "=", "max_prober", ".", "get_confidence", "(", ")", "# Use Windows encoding name instead of ISO-8859 if we saw any", "# extra Windows-specific bytes", "if", "lower_charset_name", ".", "startswith", "(", "'iso-8859'", ")", ":", "if", "self", ".", "_has_win_bytes", ":", "charset_name", "=", "self", ".", "ISO_WIN_MAP", ".", "get", "(", "lower_charset_name", ",", "charset_name", ")", "self", ".", "result", "=", "{", "'encoding'", ":", "charset_name", ",", "'confidence'", ":", "confidence", ",", "'language'", ":", "max_prober", ".", "language", "}", "# Log all prober confidences if none met MINIMUM_THRESHOLD", "if", "self", ".", "logger", ".", "getEffectiveLevel", "(", ")", "<=", "logging", ".", "DEBUG", ":", "if", "self", ".", "result", "[", "'encoding'", "]", "is", "None", ":", "self", ".", "logger", ".", "debug", "(", "'no probers hit minimum threshold'", ")", "for", "group_prober", "in", "self", ".", "_charset_probers", ":", "if", "not", "group_prober", ":", "continue", "if", "isinstance", "(", "group_prober", ",", "CharSetGroupProber", ")", ":", "for", "prober", "in", "group_prober", ".", "probers", ":", "self", ".", "logger", ".", "debug", "(", "'%s %s confidence = %s'", ",", "prober", ".", "charset_name", ",", "prober", ".", "language", ",", "prober", ".", "get_confidence", "(", ")", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "'%s %s confidence = %s'", ",", "group_prober", ".", "charset_name", ",", "group_prober", ".", "language", ",", "group_prober", ".", "get_confidence", "(", ")", ")", "return", "self", ".", "result" ]
[ 219, 4 ]
[ 285, 26 ]
python
en
['en', 'error', 'th']
False
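The dict returned by ``close`` (and mirrored on ``result``) always carries the keys ``encoding``, ``confidence``, and ``language``; ``encoding`` is ``None`` when no prober clears ``MINIMUM_THRESHOLD``, so callers should choose their own fallback. A small sketch using the one-shot ``chardet.detect`` convenience wrapper, which feeds the whole byte string and then closes the detector:

```python
import chardet

def sniff(data: bytes, fallback: str = "utf-8") -> str:
    # Same result dict as UniversalDetector.close(); encoding may be None.
    result = chardet.detect(data)
    return result["encoding"] or fallback

print(sniff("naïve café".encode("latin-1")))
```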
construct_instance
(form, instance, fields=None, exclude=None)
Construct and return a model instance from the bound ``form``'s ``cleaned_data``, but do not save the returned instance to the database.
Construct and return a model instance from the bound ``form``'s ``cleaned_data``, but do not save the returned instance to the database.
def construct_instance(form, instance, fields=None, exclude=None): """ Construct and return a model instance from the bound ``form``'s ``cleaned_data``, but do not save the returned instance to the database. """ from django.db import models opts = instance._meta cleaned_data = form.cleaned_data file_field_list = [] for f in opts.fields: if not f.editable or isinstance(f, models.AutoField) \ or f.name not in cleaned_data: continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue # Leave defaults for fields that aren't in POST data, except for # checkbox inputs because they don't appear in POST data if not checked. if ( f.has_default() and form[f.name].field.widget.value_omitted_from_data(form.data, form.files, form.add_prefix(f.name)) and cleaned_data.get(f.name) in form[f.name].field.empty_values ): continue # Defer saving file-type fields until after the other fields, so a # callable upload_to can use the values from other fields. if isinstance(f, models.FileField): file_field_list.append(f) else: f.save_form_data(instance, cleaned_data[f.name]) for f in file_field_list: f.save_form_data(instance, cleaned_data[f.name]) return instance
[ "def", "construct_instance", "(", "form", ",", "instance", ",", "fields", "=", "None", ",", "exclude", "=", "None", ")", ":", "from", "django", ".", "db", "import", "models", "opts", "=", "instance", ".", "_meta", "cleaned_data", "=", "form", ".", "cleaned_data", "file_field_list", "=", "[", "]", "for", "f", "in", "opts", ".", "fields", ":", "if", "not", "f", ".", "editable", "or", "isinstance", "(", "f", ",", "models", ".", "AutoField", ")", "or", "f", ".", "name", "not", "in", "cleaned_data", ":", "continue", "if", "fields", "is", "not", "None", "and", "f", ".", "name", "not", "in", "fields", ":", "continue", "if", "exclude", "and", "f", ".", "name", "in", "exclude", ":", "continue", "# Leave defaults for fields that aren't in POST data, except for", "# checkbox inputs because they don't appear in POST data if not checked.", "if", "(", "f", ".", "has_default", "(", ")", "and", "form", "[", "f", ".", "name", "]", ".", "field", ".", "widget", ".", "value_omitted_from_data", "(", "form", ".", "data", ",", "form", ".", "files", ",", "form", ".", "add_prefix", "(", "f", ".", "name", ")", ")", "and", "cleaned_data", ".", "get", "(", "f", ".", "name", ")", "in", "form", "[", "f", ".", "name", "]", ".", "field", ".", "empty_values", ")", ":", "continue", "# Defer saving file-type fields until after the other fields, so a", "# callable upload_to can use the values from other fields.", "if", "isinstance", "(", "f", ",", "models", ".", "FileField", ")", ":", "file_field_list", ".", "append", "(", "f", ")", "else", ":", "f", ".", "save_form_data", "(", "instance", ",", "cleaned_data", "[", "f", ".", "name", "]", ")", "for", "f", "in", "file_field_list", ":", "f", ".", "save_form_data", "(", "instance", ",", "cleaned_data", "[", "f", ".", "name", "]", ")", "return", "instance" ]
[ 31, 0 ]
[ 67, 19 ]
python
en
['en', 'error', 'th']
False
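``construct_instance`` is normally driven indirectly by ``ModelForm._post_clean``, but it can be called directly when only an unsaved, populated instance is wanted. A sketch under assumed names: ``myapp.models.Article`` and its ``title``/``body`` fields are hypothetical.

```python
from django import forms
from django.forms.models import construct_instance
from myapp.models import Article  # hypothetical app and model

class ArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        fields = ["title", "body"]

form = ArticleForm(data={"title": "Hello", "body": "..."})
if form.is_valid():
    # Populated from cleaned_data but not written to the database.
    article = construct_instance(form, Article(), fields=["title", "body"])
    article.save()  # persist explicitly when ready
```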
model_to_dict
(instance, fields=None, exclude=None)
Return a dict containing the data in ``instance`` suitable for passing as a Form's ``initial`` keyword argument. ``fields`` is an optional list of field names. If provided, return only the named. ``exclude`` is an optional list of field names. If provided, exclude the named from the returned dict, even if they are listed in the ``fields`` argument.
Return a dict containing the data in ``instance`` suitable for passing as a Form's ``initial`` keyword argument.
def model_to_dict(instance, fields=None, exclude=None): """ Return a dict containing the data in ``instance`` suitable for passing as a Form's ``initial`` keyword argument. ``fields`` is an optional list of field names. If provided, return only the named. ``exclude`` is an optional list of field names. If provided, exclude the named from the returned dict, even if they are listed in the ``fields`` argument. """ opts = instance._meta data = {} for f in chain(opts.concrete_fields, opts.private_fields, opts.many_to_many): if not getattr(f, 'editable', False): continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue data[f.name] = f.value_from_object(instance) return data
[ "def", "model_to_dict", "(", "instance", ",", "fields", "=", "None", ",", "exclude", "=", "None", ")", ":", "opts", "=", "instance", ".", "_meta", "data", "=", "{", "}", "for", "f", "in", "chain", "(", "opts", ".", "concrete_fields", ",", "opts", ".", "private_fields", ",", "opts", ".", "many_to_many", ")", ":", "if", "not", "getattr", "(", "f", ",", "'editable'", ",", "False", ")", ":", "continue", "if", "fields", "is", "not", "None", "and", "f", ".", "name", "not", "in", "fields", ":", "continue", "if", "exclude", "and", "f", ".", "name", "in", "exclude", ":", "continue", "data", "[", "f", ".", "name", "]", "=", "f", ".", "value_from_object", "(", "instance", ")", "return", "data" ]
[ 72, 0 ]
[ 94, 15 ]
python
en
['en', 'error', 'th']
False
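A usage sketch for ``model_to_dict``: seed a form's ``initial`` from an existing row. The ``Article`` model and the existing primary key are hypothetical.

```python
from django.forms.models import model_to_dict
from myapp.models import Article  # hypothetical app and model

article = Article.objects.get(pk=1)            # assumes such a row exists
data = model_to_dict(article, fields=["title", "body"])
# Non-editable fields are always skipped, and 'exclude' wins over 'fields'.
print(data)                                    # e.g. {'title': 'Hello', 'body': '...'}
```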
apply_limit_choices_to_to_formfield
(formfield)
Apply limit_choices_to to the formfield's queryset if needed.
Apply limit_choices_to to the formfield's queryset if needed.
def apply_limit_choices_to_to_formfield(formfield): """Apply limit_choices_to to the formfield's queryset if needed.""" from django.db.models import Exists, OuterRef, Q if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'): limit_choices_to = formfield.get_limit_choices_to() if limit_choices_to: complex_filter = limit_choices_to if not isinstance(complex_filter, Q): complex_filter = Q(**limit_choices_to) complex_filter &= Q(pk=OuterRef('pk')) # Use Exists() to avoid potential duplicates. formfield.queryset = formfield.queryset.filter( Exists(formfield.queryset.model._base_manager.filter(complex_filter)), )
[ "def", "apply_limit_choices_to_to_formfield", "(", "formfield", ")", ":", "from", "django", ".", "db", ".", "models", "import", "Exists", ",", "OuterRef", ",", "Q", "if", "hasattr", "(", "formfield", ",", "'queryset'", ")", "and", "hasattr", "(", "formfield", ",", "'get_limit_choices_to'", ")", ":", "limit_choices_to", "=", "formfield", ".", "get_limit_choices_to", "(", ")", "if", "limit_choices_to", ":", "complex_filter", "=", "limit_choices_to", "if", "not", "isinstance", "(", "complex_filter", ",", "Q", ")", ":", "complex_filter", "=", "Q", "(", "*", "*", "limit_choices_to", ")", "complex_filter", "&=", "Q", "(", "pk", "=", "OuterRef", "(", "'pk'", ")", ")", "# Use Exists() to avoid potential duplicates.", "formfield", ".", "queryset", "=", "formfield", ".", "queryset", ".", "filter", "(", "Exists", "(", "formfield", ".", "queryset", ".", "model", ".", "_base_manager", ".", "filter", "(", "complex_filter", ")", ")", ",", ")" ]
[ 97, 0 ]
[ 110, 13 ]
python
en
['en', 'en', 'en']
True
fields_for_model
(model, fields=None, exclude=None, widgets=None, formfield_callback=None, localized_fields=None, labels=None, help_texts=None, error_messages=None, field_classes=None, *, apply_limit_choices_to=True)
Return a dictionary containing form fields for the given model. ``fields`` is an optional list of field names. If provided, return only the named fields. ``exclude`` is an optional list of field names. If provided, exclude the named fields from the returned fields, even if they are listed in the ``fields`` argument. ``widgets`` is a dictionary of model field names mapped to a widget. ``formfield_callback`` is a callable that takes a model field and returns a form field. ``localized_fields`` is a list of names of fields which should be localized. ``labels`` is a dictionary of model field names mapped to a label. ``help_texts`` is a dictionary of model field names mapped to a help text. ``error_messages`` is a dictionary of model field names mapped to a dictionary of error messages. ``field_classes`` is a dictionary of model field names mapped to a form field class. ``apply_limit_choices_to`` is a boolean indicating if limit_choices_to should be applied to a field's queryset.
Return a dictionary containing form fields for the given model.
def fields_for_model(model, fields=None, exclude=None, widgets=None, formfield_callback=None, localized_fields=None, labels=None, help_texts=None, error_messages=None, field_classes=None, *, apply_limit_choices_to=True): """ Return a dictionary containing form fields for the given model. ``fields`` is an optional list of field names. If provided, return only the named fields. ``exclude`` is an optional list of field names. If provided, exclude the named fields from the returned fields, even if they are listed in the ``fields`` argument. ``widgets`` is a dictionary of model field names mapped to a widget. ``formfield_callback`` is a callable that takes a model field and returns a form field. ``localized_fields`` is a list of names of fields which should be localized. ``labels`` is a dictionary of model field names mapped to a label. ``help_texts`` is a dictionary of model field names mapped to a help text. ``error_messages`` is a dictionary of model field names mapped to a dictionary of error messages. ``field_classes`` is a dictionary of model field names mapped to a form field class. ``apply_limit_choices_to`` is a boolean indicating if limit_choices_to should be applied to a field's queryset. """ field_dict = {} ignored = [] opts = model._meta # Avoid circular import from django.db.models import Field as ModelField sortable_private_fields = [f for f in opts.private_fields if isinstance(f, ModelField)] for f in sorted(chain(opts.concrete_fields, sortable_private_fields, opts.many_to_many)): if not getattr(f, 'editable', False): if (fields is not None and f.name in fields and (exclude is None or f.name not in exclude)): raise FieldError( "'%s' cannot be specified for %s model form as it is a non-editable field" % ( f.name, model.__name__) ) continue if fields is not None and f.name not in fields: continue if exclude and f.name in exclude: continue kwargs = {} if widgets and f.name in widgets: kwargs['widget'] = widgets[f.name] if localized_fields == ALL_FIELDS or (localized_fields and f.name in localized_fields): kwargs['localize'] = True if labels and f.name in labels: kwargs['label'] = labels[f.name] if help_texts and f.name in help_texts: kwargs['help_text'] = help_texts[f.name] if error_messages and f.name in error_messages: kwargs['error_messages'] = error_messages[f.name] if field_classes and f.name in field_classes: kwargs['form_class'] = field_classes[f.name] if formfield_callback is None: formfield = f.formfield(**kwargs) elif not callable(formfield_callback): raise TypeError('formfield_callback must be a function or callable') else: formfield = formfield_callback(f, **kwargs) if formfield: if apply_limit_choices_to: apply_limit_choices_to_to_formfield(formfield) field_dict[f.name] = formfield else: ignored.append(f.name) if fields: field_dict = { f: field_dict.get(f) for f in fields if (not exclude or f not in exclude) and f not in ignored } return field_dict
[ "def", "fields_for_model", "(", "model", ",", "fields", "=", "None", ",", "exclude", "=", "None", ",", "widgets", "=", "None", ",", "formfield_callback", "=", "None", ",", "localized_fields", "=", "None", ",", "labels", "=", "None", ",", "help_texts", "=", "None", ",", "error_messages", "=", "None", ",", "field_classes", "=", "None", ",", "*", ",", "apply_limit_choices_to", "=", "True", ")", ":", "field_dict", "=", "{", "}", "ignored", "=", "[", "]", "opts", "=", "model", ".", "_meta", "# Avoid circular import", "from", "django", ".", "db", ".", "models", "import", "Field", "as", "ModelField", "sortable_private_fields", "=", "[", "f", "for", "f", "in", "opts", ".", "private_fields", "if", "isinstance", "(", "f", ",", "ModelField", ")", "]", "for", "f", "in", "sorted", "(", "chain", "(", "opts", ".", "concrete_fields", ",", "sortable_private_fields", ",", "opts", ".", "many_to_many", ")", ")", ":", "if", "not", "getattr", "(", "f", ",", "'editable'", ",", "False", ")", ":", "if", "(", "fields", "is", "not", "None", "and", "f", ".", "name", "in", "fields", "and", "(", "exclude", "is", "None", "or", "f", ".", "name", "not", "in", "exclude", ")", ")", ":", "raise", "FieldError", "(", "\"'%s' cannot be specified for %s model form as it is a non-editable field\"", "%", "(", "f", ".", "name", ",", "model", ".", "__name__", ")", ")", "continue", "if", "fields", "is", "not", "None", "and", "f", ".", "name", "not", "in", "fields", ":", "continue", "if", "exclude", "and", "f", ".", "name", "in", "exclude", ":", "continue", "kwargs", "=", "{", "}", "if", "widgets", "and", "f", ".", "name", "in", "widgets", ":", "kwargs", "[", "'widget'", "]", "=", "widgets", "[", "f", ".", "name", "]", "if", "localized_fields", "==", "ALL_FIELDS", "or", "(", "localized_fields", "and", "f", ".", "name", "in", "localized_fields", ")", ":", "kwargs", "[", "'localize'", "]", "=", "True", "if", "labels", "and", "f", ".", "name", "in", "labels", ":", "kwargs", "[", "'label'", "]", "=", "labels", "[", "f", ".", "name", "]", "if", "help_texts", "and", "f", ".", "name", "in", "help_texts", ":", "kwargs", "[", "'help_text'", "]", "=", "help_texts", "[", "f", ".", "name", "]", "if", "error_messages", "and", "f", ".", "name", "in", "error_messages", ":", "kwargs", "[", "'error_messages'", "]", "=", "error_messages", "[", "f", ".", "name", "]", "if", "field_classes", "and", "f", ".", "name", "in", "field_classes", ":", "kwargs", "[", "'form_class'", "]", "=", "field_classes", "[", "f", ".", "name", "]", "if", "formfield_callback", "is", "None", ":", "formfield", "=", "f", ".", "formfield", "(", "*", "*", "kwargs", ")", "elif", "not", "callable", "(", "formfield_callback", ")", ":", "raise", "TypeError", "(", "'formfield_callback must be a function or callable'", ")", "else", ":", "formfield", "=", "formfield_callback", "(", "f", ",", "*", "*", "kwargs", ")", "if", "formfield", ":", "if", "apply_limit_choices_to", ":", "apply_limit_choices_to_to_formfield", "(", "formfield", ")", "field_dict", "[", "f", ".", "name", "]", "=", "formfield", "else", ":", "ignored", ".", "append", "(", "f", ".", "name", ")", "if", "fields", ":", "field_dict", "=", "{", "f", ":", "field_dict", ".", "get", "(", "f", ")", "for", "f", "in", "fields", "if", "(", "not", "exclude", "or", "f", "not", "in", "exclude", ")", "and", "f", "not", "in", "ignored", "}", "return", "field_dict" ]
[ 113, 0 ]
[ 199, 21 ]
python
en
['en', 'error', 'th']
False
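A sketch of calling ``fields_for_model`` directly to get plain form fields without declaring a ``ModelForm``; the ``Article`` model and its field names are hypothetical.

```python
from django.forms.models import fields_for_model
from myapp.models import Article  # hypothetical app and model

fields = fields_for_model(
    Article,
    fields=["title", "body"],
    labels={"title": "Headline"},
    help_texts={"body": "Plain text only."},
)
print(sorted(fields))   # ['body', 'title'] -- a dict of {field name: form field}
```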
modelform_factory
(model, form=ModelForm, fields=None, exclude=None, formfield_callback=None, widgets=None, localized_fields=None, labels=None, help_texts=None, error_messages=None, field_classes=None)
Return a ModelForm containing form fields for the given model. You can optionally pass a `form` argument to use as a starting point for constructing the ModelForm. ``fields`` is an optional list of field names. If provided, include only the named fields in the returned fields. If omitted or '__all__', use all fields. ``exclude`` is an optional list of field names. If provided, exclude the named fields from the returned fields, even if they are listed in the ``fields`` argument. ``widgets`` is a dictionary of model field names mapped to a widget. ``localized_fields`` is a list of names of fields which should be localized. ``formfield_callback`` is a callable that takes a model field and returns a form field. ``labels`` is a dictionary of model field names mapped to a label. ``help_texts`` is a dictionary of model field names mapped to a help text. ``error_messages`` is a dictionary of model field names mapped to a dictionary of error messages. ``field_classes`` is a dictionary of model field names mapped to a form field class.
Return a ModelForm containing form fields for the given model. You can optionally pass a `form` argument to use as a starting point for constructing the ModelForm.
def modelform_factory(model, form=ModelForm, fields=None, exclude=None, formfield_callback=None, widgets=None, localized_fields=None, labels=None, help_texts=None, error_messages=None, field_classes=None): """ Return a ModelForm containing form fields for the given model. You can optionally pass a `form` argument to use as a starting point for constructing the ModelForm. ``fields`` is an optional list of field names. If provided, include only the named fields in the returned fields. If omitted or '__all__', use all fields. ``exclude`` is an optional list of field names. If provided, exclude the named fields from the returned fields, even if they are listed in the ``fields`` argument. ``widgets`` is a dictionary of model field names mapped to a widget. ``localized_fields`` is a list of names of fields which should be localized. ``formfield_callback`` is a callable that takes a model field and returns a form field. ``labels`` is a dictionary of model field names mapped to a label. ``help_texts`` is a dictionary of model field names mapped to a help text. ``error_messages`` is a dictionary of model field names mapped to a dictionary of error messages. ``field_classes`` is a dictionary of model field names mapped to a form field class. """ # Create the inner Meta class. FIXME: ideally, we should be able to # construct a ModelForm without creating and passing in a temporary # inner class. # Build up a list of attributes that the Meta object will have. attrs = {'model': model} if fields is not None: attrs['fields'] = fields if exclude is not None: attrs['exclude'] = exclude if widgets is not None: attrs['widgets'] = widgets if localized_fields is not None: attrs['localized_fields'] = localized_fields if labels is not None: attrs['labels'] = labels if help_texts is not None: attrs['help_texts'] = help_texts if error_messages is not None: attrs['error_messages'] = error_messages if field_classes is not None: attrs['field_classes'] = field_classes # If parent form class already has an inner Meta, the Meta we're # creating needs to inherit from the parent's inner meta. bases = (form.Meta,) if hasattr(form, 'Meta') else () Meta = type('Meta', bases, attrs) if formfield_callback: Meta.formfield_callback = staticmethod(formfield_callback) # Give this new form class a reasonable name. class_name = model.__name__ + 'Form' # Class attributes for the new form class. form_class_attrs = { 'Meta': Meta, 'formfield_callback': formfield_callback } if (getattr(Meta, 'fields', None) is None and getattr(Meta, 'exclude', None) is None): raise ImproperlyConfigured( "Calling modelform_factory without defining 'fields' or " "'exclude' explicitly is prohibited." ) # Instantiate type(form) in order to use the same metaclass as form. return type(form)(class_name, (form,), form_class_attrs)
[ "def", "modelform_factory", "(", "model", ",", "form", "=", "ModelForm", ",", "fields", "=", "None", ",", "exclude", "=", "None", ",", "formfield_callback", "=", "None", ",", "widgets", "=", "None", ",", "localized_fields", "=", "None", ",", "labels", "=", "None", ",", "help_texts", "=", "None", ",", "error_messages", "=", "None", ",", "field_classes", "=", "None", ")", ":", "# Create the inner Meta class. FIXME: ideally, we should be able to", "# construct a ModelForm without creating and passing in a temporary", "# inner class.", "# Build up a list of attributes that the Meta object will have.", "attrs", "=", "{", "'model'", ":", "model", "}", "if", "fields", "is", "not", "None", ":", "attrs", "[", "'fields'", "]", "=", "fields", "if", "exclude", "is", "not", "None", ":", "attrs", "[", "'exclude'", "]", "=", "exclude", "if", "widgets", "is", "not", "None", ":", "attrs", "[", "'widgets'", "]", "=", "widgets", "if", "localized_fields", "is", "not", "None", ":", "attrs", "[", "'localized_fields'", "]", "=", "localized_fields", "if", "labels", "is", "not", "None", ":", "attrs", "[", "'labels'", "]", "=", "labels", "if", "help_texts", "is", "not", "None", ":", "attrs", "[", "'help_texts'", "]", "=", "help_texts", "if", "error_messages", "is", "not", "None", ":", "attrs", "[", "'error_messages'", "]", "=", "error_messages", "if", "field_classes", "is", "not", "None", ":", "attrs", "[", "'field_classes'", "]", "=", "field_classes", "# If parent form class already has an inner Meta, the Meta we're", "# creating needs to inherit from the parent's inner meta.", "bases", "=", "(", "form", ".", "Meta", ",", ")", "if", "hasattr", "(", "form", ",", "'Meta'", ")", "else", "(", ")", "Meta", "=", "type", "(", "'Meta'", ",", "bases", ",", "attrs", ")", "if", "formfield_callback", ":", "Meta", ".", "formfield_callback", "=", "staticmethod", "(", "formfield_callback", ")", "# Give this new form class a reasonable name.", "class_name", "=", "model", ".", "__name__", "+", "'Form'", "# Class attributes for the new form class.", "form_class_attrs", "=", "{", "'Meta'", ":", "Meta", ",", "'formfield_callback'", ":", "formfield_callback", "}", "if", "(", "getattr", "(", "Meta", ",", "'fields'", ",", "None", ")", "is", "None", "and", "getattr", "(", "Meta", ",", "'exclude'", ",", "None", ")", "is", "None", ")", ":", "raise", "ImproperlyConfigured", "(", "\"Calling modelform_factory without defining 'fields' or \"", "\"'exclude' explicitly is prohibited.\"", ")", "# Instantiate type(form) in order to use the same metaclass as form.", "return", "type", "(", "form", ")", "(", "class_name", ",", "(", "form", ",", ")", ",", "form_class_attrs", ")" ]
[ 482, 0 ]
[ 562, 60 ]
python
en
['en', 'error', 'th']
False
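A sketch of building a ``ModelForm`` class on the fly with ``modelform_factory``; note that ``fields`` or ``exclude`` must be passed explicitly, as the docstring's ``ImproperlyConfigured`` check enforces. The ``Article`` model is hypothetical.

```python
from django.forms import modelform_factory
from myapp.models import Article  # hypothetical app and model

ArticleForm = modelform_factory(
    Article,
    fields=["title", "body"],
    labels={"title": "Headline"},
)
form = ArticleForm(data={"title": "Hello", "body": "..."})
print(form.is_valid(), form.errors)
```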
modelformset_factory
(model, form=ModelForm, formfield_callback=None, formset=BaseModelFormSet, extra=1, can_delete=False, can_order=False, max_num=None, fields=None, exclude=None, widgets=None, validate_max=False, localized_fields=None, labels=None, help_texts=None, error_messages=None, min_num=None, validate_min=False, field_classes=None, absolute_max=None, can_delete_extra=True)
Return a FormSet class for the given Django model class.
Return a FormSet class for the given Django model class.
def modelformset_factory(model, form=ModelForm, formfield_callback=None, formset=BaseModelFormSet, extra=1, can_delete=False, can_order=False, max_num=None, fields=None, exclude=None, widgets=None, validate_max=False, localized_fields=None, labels=None, help_texts=None, error_messages=None, min_num=None, validate_min=False, field_classes=None, absolute_max=None, can_delete_extra=True): """Return a FormSet class for the given Django model class.""" meta = getattr(form, 'Meta', None) if (getattr(meta, 'fields', fields) is None and getattr(meta, 'exclude', exclude) is None): raise ImproperlyConfigured( "Calling modelformset_factory without defining 'fields' or " "'exclude' explicitly is prohibited." ) form = modelform_factory(model, form=form, fields=fields, exclude=exclude, formfield_callback=formfield_callback, widgets=widgets, localized_fields=localized_fields, labels=labels, help_texts=help_texts, error_messages=error_messages, field_classes=field_classes) FormSet = formset_factory(form, formset, extra=extra, min_num=min_num, max_num=max_num, can_order=can_order, can_delete=can_delete, validate_min=validate_min, validate_max=validate_max, absolute_max=absolute_max, can_delete_extra=can_delete_extra) FormSet.model = model return FormSet
[ "def", "modelformset_factory", "(", "model", ",", "form", "=", "ModelForm", ",", "formfield_callback", "=", "None", ",", "formset", "=", "BaseModelFormSet", ",", "extra", "=", "1", ",", "can_delete", "=", "False", ",", "can_order", "=", "False", ",", "max_num", "=", "None", ",", "fields", "=", "None", ",", "exclude", "=", "None", ",", "widgets", "=", "None", ",", "validate_max", "=", "False", ",", "localized_fields", "=", "None", ",", "labels", "=", "None", ",", "help_texts", "=", "None", ",", "error_messages", "=", "None", ",", "min_num", "=", "None", ",", "validate_min", "=", "False", ",", "field_classes", "=", "None", ",", "absolute_max", "=", "None", ",", "can_delete_extra", "=", "True", ")", ":", "meta", "=", "getattr", "(", "form", ",", "'Meta'", ",", "None", ")", "if", "(", "getattr", "(", "meta", ",", "'fields'", ",", "fields", ")", "is", "None", "and", "getattr", "(", "meta", ",", "'exclude'", ",", "exclude", ")", "is", "None", ")", ":", "raise", "ImproperlyConfigured", "(", "\"Calling modelformset_factory without defining 'fields' or \"", "\"'exclude' explicitly is prohibited.\"", ")", "form", "=", "modelform_factory", "(", "model", ",", "form", "=", "form", ",", "fields", "=", "fields", ",", "exclude", "=", "exclude", ",", "formfield_callback", "=", "formfield_callback", ",", "widgets", "=", "widgets", ",", "localized_fields", "=", "localized_fields", ",", "labels", "=", "labels", ",", "help_texts", "=", "help_texts", ",", "error_messages", "=", "error_messages", ",", "field_classes", "=", "field_classes", ")", "FormSet", "=", "formset_factory", "(", "form", ",", "formset", ",", "extra", "=", "extra", ",", "min_num", "=", "min_num", ",", "max_num", "=", "max_num", ",", "can_order", "=", "can_order", ",", "can_delete", "=", "can_delete", ",", "validate_min", "=", "validate_min", ",", "validate_max", "=", "validate_max", ",", "absolute_max", "=", "absolute_max", ",", "can_delete_extra", "=", "can_delete_extra", ")", "FormSet", ".", "model", "=", "model", "return", "FormSet" ]
[ 867, 0 ]
[ 893, 18 ]
python
en
['en', 'en', 'en']
True
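A sketch of ``modelformset_factory`` producing an editable list of rows; the ``Article`` model is hypothetical.

```python
from django.forms import modelformset_factory
from myapp.models import Article  # hypothetical app and model

ArticleFormSet = modelformset_factory(
    Article,
    fields=["title"],
    extra=1,            # one blank form in addition to existing rows
    can_delete=True,
)
formset = ArticleFormSet(queryset=Article.objects.order_by("title"))
for form in formset:
    print(form.as_p())
```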
_get_foreign_key
(parent_model, model, fk_name=None, can_fail=False)
Find and return the ForeignKey from model to parent if there is one (return None if can_fail is True and no such field exists). If fk_name is provided, assume it is the name of the ForeignKey field. Unless can_fail is True, raise an exception if there isn't a ForeignKey from model to parent_model.
Find and return the ForeignKey from model to parent if there is one (return None if can_fail is True and no such field exists). If fk_name is provided, assume it is the name of the ForeignKey field. Unless can_fail is True, raise an exception if there isn't a ForeignKey from model to parent_model.
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False): """ Find and return the ForeignKey from model to parent if there is one (return None if can_fail is True and no such field exists). If fk_name is provided, assume it is the name of the ForeignKey field. Unless can_fail is True, raise an exception if there isn't a ForeignKey from model to parent_model. """ # avoid circular import from django.db.models import ForeignKey opts = model._meta if fk_name: fks_to_parent = [f for f in opts.fields if f.name == fk_name] if len(fks_to_parent) == 1: fk = fks_to_parent[0] if not isinstance(fk, ForeignKey) or \ (fk.remote_field.model != parent_model and fk.remote_field.model not in parent_model._meta.get_parent_list()): raise ValueError( "fk_name '%s' is not a ForeignKey to '%s'." % (fk_name, parent_model._meta.label) ) elif not fks_to_parent: raise ValueError( "'%s' has no field named '%s'." % (model._meta.label, fk_name) ) else: # Try to discover what the ForeignKey from model to parent_model is fks_to_parent = [ f for f in opts.fields if isinstance(f, ForeignKey) and ( f.remote_field.model == parent_model or f.remote_field.model in parent_model._meta.get_parent_list() ) ] if len(fks_to_parent) == 1: fk = fks_to_parent[0] elif not fks_to_parent: if can_fail: return raise ValueError( "'%s' has no ForeignKey to '%s'." % ( model._meta.label, parent_model._meta.label, ) ) else: raise ValueError( "'%s' has more than one ForeignKey to '%s'. You must specify " "a 'fk_name' attribute." % ( model._meta.label, parent_model._meta.label, ) ) return fk
[ "def", "_get_foreign_key", "(", "parent_model", ",", "model", ",", "fk_name", "=", "None", ",", "can_fail", "=", "False", ")", ":", "# avoid circular import", "from", "django", ".", "db", ".", "models", "import", "ForeignKey", "opts", "=", "model", ".", "_meta", "if", "fk_name", ":", "fks_to_parent", "=", "[", "f", "for", "f", "in", "opts", ".", "fields", "if", "f", ".", "name", "==", "fk_name", "]", "if", "len", "(", "fks_to_parent", ")", "==", "1", ":", "fk", "=", "fks_to_parent", "[", "0", "]", "if", "not", "isinstance", "(", "fk", ",", "ForeignKey", ")", "or", "(", "fk", ".", "remote_field", ".", "model", "!=", "parent_model", "and", "fk", ".", "remote_field", ".", "model", "not", "in", "parent_model", ".", "_meta", ".", "get_parent_list", "(", ")", ")", ":", "raise", "ValueError", "(", "\"fk_name '%s' is not a ForeignKey to '%s'.\"", "%", "(", "fk_name", ",", "parent_model", ".", "_meta", ".", "label", ")", ")", "elif", "not", "fks_to_parent", ":", "raise", "ValueError", "(", "\"'%s' has no field named '%s'.\"", "%", "(", "model", ".", "_meta", ".", "label", ",", "fk_name", ")", ")", "else", ":", "# Try to discover what the ForeignKey from model to parent_model is", "fks_to_parent", "=", "[", "f", "for", "f", "in", "opts", ".", "fields", "if", "isinstance", "(", "f", ",", "ForeignKey", ")", "and", "(", "f", ".", "remote_field", ".", "model", "==", "parent_model", "or", "f", ".", "remote_field", ".", "model", "in", "parent_model", ".", "_meta", ".", "get_parent_list", "(", ")", ")", "]", "if", "len", "(", "fks_to_parent", ")", "==", "1", ":", "fk", "=", "fks_to_parent", "[", "0", "]", "elif", "not", "fks_to_parent", ":", "if", "can_fail", ":", "return", "raise", "ValueError", "(", "\"'%s' has no ForeignKey to '%s'.\"", "%", "(", "model", ".", "_meta", ".", "label", ",", "parent_model", ".", "_meta", ".", "label", ",", ")", ")", "else", ":", "raise", "ValueError", "(", "\"'%s' has more than one ForeignKey to '%s'. You must specify \"", "\"a 'fk_name' attribute.\"", "%", "(", "model", ".", "_meta", ".", "label", ",", "parent_model", ".", "_meta", ".", "label", ",", ")", ")", "return", "fk" ]
[ 998, 0 ]
[ 1051, 13 ]
python
en
['en', 'error', 'th']
False
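A small sketch of the resolution rule described above: when the child model has more than one ``ForeignKey`` to the parent (say ``author`` and ``editor``), the automatic lookup is ambiguous and ``fk_name`` must name the one to use. ``_get_foreign_key`` is a private helper, and the ``Author``/``Book`` models here are hypothetical.

```python
from django.forms.models import _get_foreign_key  # private helper
from myapp.models import Author, Book             # hypothetical models

fk = _get_foreign_key(Author, Book, fk_name="author")
print(fk.name)   # 'author'
```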
inlineformset_factory
(parent_model, model, form=ModelForm, formset=BaseInlineFormSet, fk_name=None, fields=None, exclude=None, extra=3, can_order=False, can_delete=True, max_num=None, formfield_callback=None, widgets=None, validate_max=False, localized_fields=None, labels=None, help_texts=None, error_messages=None, min_num=None, validate_min=False, field_classes=None, absolute_max=None, can_delete_extra=True)
Return an ``InlineFormSet`` for the given kwargs. ``fk_name`` must be provided if ``model`` has more than one ``ForeignKey`` to ``parent_model``.
Return an ``InlineFormSet`` for the given kwargs.
def inlineformset_factory(parent_model, model, form=ModelForm, formset=BaseInlineFormSet, fk_name=None, fields=None, exclude=None, extra=3, can_order=False, can_delete=True, max_num=None, formfield_callback=None, widgets=None, validate_max=False, localized_fields=None, labels=None, help_texts=None, error_messages=None, min_num=None, validate_min=False, field_classes=None, absolute_max=None, can_delete_extra=True): """ Return an ``InlineFormSet`` for the given kwargs. ``fk_name`` must be provided if ``model`` has more than one ``ForeignKey`` to ``parent_model``. """ fk = _get_foreign_key(parent_model, model, fk_name=fk_name) # enforce a max_num=1 when the foreign key to the parent model is unique. if fk.unique: max_num = 1 kwargs = { 'form': form, 'formfield_callback': formfield_callback, 'formset': formset, 'extra': extra, 'can_delete': can_delete, 'can_order': can_order, 'fields': fields, 'exclude': exclude, 'min_num': min_num, 'max_num': max_num, 'widgets': widgets, 'validate_min': validate_min, 'validate_max': validate_max, 'localized_fields': localized_fields, 'labels': labels, 'help_texts': help_texts, 'error_messages': error_messages, 'field_classes': field_classes, 'absolute_max': absolute_max, 'can_delete_extra': can_delete_extra, } FormSet = modelformset_factory(model, **kwargs) FormSet.fk = fk return FormSet
[ "def", "inlineformset_factory", "(", "parent_model", ",", "model", ",", "form", "=", "ModelForm", ",", "formset", "=", "BaseInlineFormSet", ",", "fk_name", "=", "None", ",", "fields", "=", "None", ",", "exclude", "=", "None", ",", "extra", "=", "3", ",", "can_order", "=", "False", ",", "can_delete", "=", "True", ",", "max_num", "=", "None", ",", "formfield_callback", "=", "None", ",", "widgets", "=", "None", ",", "validate_max", "=", "False", ",", "localized_fields", "=", "None", ",", "labels", "=", "None", ",", "help_texts", "=", "None", ",", "error_messages", "=", "None", ",", "min_num", "=", "None", ",", "validate_min", "=", "False", ",", "field_classes", "=", "None", ",", "absolute_max", "=", "None", ",", "can_delete_extra", "=", "True", ")", ":", "fk", "=", "_get_foreign_key", "(", "parent_model", ",", "model", ",", "fk_name", "=", "fk_name", ")", "# enforce a max_num=1 when the foreign key to the parent model is unique.", "if", "fk", ".", "unique", ":", "max_num", "=", "1", "kwargs", "=", "{", "'form'", ":", "form", ",", "'formfield_callback'", ":", "formfield_callback", ",", "'formset'", ":", "formset", ",", "'extra'", ":", "extra", ",", "'can_delete'", ":", "can_delete", ",", "'can_order'", ":", "can_order", ",", "'fields'", ":", "fields", ",", "'exclude'", ":", "exclude", ",", "'min_num'", ":", "min_num", ",", "'max_num'", ":", "max_num", ",", "'widgets'", ":", "widgets", ",", "'validate_min'", ":", "validate_min", ",", "'validate_max'", ":", "validate_max", ",", "'localized_fields'", ":", "localized_fields", ",", "'labels'", ":", "labels", ",", "'help_texts'", ":", "help_texts", ",", "'error_messages'", ":", "error_messages", ",", "'field_classes'", ":", "field_classes", ",", "'absolute_max'", ":", "absolute_max", ",", "'can_delete_extra'", ":", "can_delete_extra", ",", "}", "FormSet", "=", "modelformset_factory", "(", "model", ",", "*", "*", "kwargs", ")", "FormSet", ".", "fk", "=", "fk", "return", "FormSet" ]
[ 1054, 0 ]
[ 1096, 18 ]
python
en
['en', 'error', 'th']
False
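A sketch of ``inlineformset_factory`` editing child rows under a parent; ``Author`` and ``Book`` are hypothetical, with ``Book`` assumed to have a single ``ForeignKey`` to ``Author`` (otherwise ``fk_name`` is required, per ``_get_foreign_key`` above).

```python
from django.forms import inlineformset_factory
from myapp.models import Author, Book  # hypothetical models

BookFormSet = inlineformset_factory(
    Author, Book,
    fields=["title"],
    extra=2,
    can_delete=True,
)
author = Author.objects.get(pk=1)            # assumes such a row exists
formset = BookFormSet(instance=author)       # bind request.POST here in a real view
print(formset.management_form)
```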
BaseModelForm._get_validation_exclusions
(self)
For backwards-compatibility, exclude several types of fields from model validation. See tickets #12507, #12521, #12553.
For backwards-compatibility, exclude several types of fields from model validation. See tickets #12507, #12521, #12553.
def _get_validation_exclusions(self): """ For backwards-compatibility, exclude several types of fields from model validation. See tickets #12507, #12521, #12553. """ exclude = [] # Build up a list of fields that should be excluded from model field # validation and unique checks. for f in self.instance._meta.fields: field = f.name # Exclude fields that aren't on the form. The developer may be # adding these values to the model after form validation. if field not in self.fields: exclude.append(f.name) # Don't perform model validation on fields that were defined # manually on the form and excluded via the ModelForm's Meta # class. See #12901. elif self._meta.fields and field not in self._meta.fields: exclude.append(f.name) elif self._meta.exclude and field in self._meta.exclude: exclude.append(f.name) # Exclude fields that failed form validation. There's no need for # the model fields to validate them as well. elif field in self._errors: exclude.append(f.name) # Exclude empty fields that are not required by the form, if the # underlying model field is required. This keeps the model field # from raising a required error. Note: don't exclude the field from # validation if the model field allows blanks. If it does, the blank # value may be included in a unique check, so cannot be excluded # from validation. else: form_field = self.fields[field] field_value = self.cleaned_data.get(field) if not f.blank and not form_field.required and field_value in form_field.empty_values: exclude.append(f.name) return exclude
[ "def", "_get_validation_exclusions", "(", "self", ")", ":", "exclude", "=", "[", "]", "# Build up a list of fields that should be excluded from model field", "# validation and unique checks.", "for", "f", "in", "self", ".", "instance", ".", "_meta", ".", "fields", ":", "field", "=", "f", ".", "name", "# Exclude fields that aren't on the form. The developer may be", "# adding these values to the model after form validation.", "if", "field", "not", "in", "self", ".", "fields", ":", "exclude", ".", "append", "(", "f", ".", "name", ")", "# Don't perform model validation on fields that were defined", "# manually on the form and excluded via the ModelForm's Meta", "# class. See #12901.", "elif", "self", ".", "_meta", ".", "fields", "and", "field", "not", "in", "self", ".", "_meta", ".", "fields", ":", "exclude", ".", "append", "(", "f", ".", "name", ")", "elif", "self", ".", "_meta", ".", "exclude", "and", "field", "in", "self", ".", "_meta", ".", "exclude", ":", "exclude", ".", "append", "(", "f", ".", "name", ")", "# Exclude fields that failed form validation. There's no need for", "# the model fields to validate them as well.", "elif", "field", "in", "self", ".", "_errors", ":", "exclude", ".", "append", "(", "f", ".", "name", ")", "# Exclude empty fields that are not required by the form, if the", "# underlying model field is required. This keeps the model field", "# from raising a required error. Note: don't exclude the field from", "# validation if the model field allows blanks. If it does, the blank", "# value may be included in a unique check, so cannot be excluded", "# from validation.", "else", ":", "form_field", "=", "self", ".", "fields", "[", "field", "]", "field_value", "=", "self", ".", "cleaned_data", ".", "get", "(", "field", ")", "if", "not", "f", ".", "blank", "and", "not", "form_field", ".", "required", "and", "field_value", "in", "form_field", ".", "empty_values", ":", "exclude", ".", "append", "(", "f", ".", "name", ")", "return", "exclude" ]
[ 317, 4 ]
[ 356, 22 ]
python
en
['en', 'error', 'th']
False
BaseModelForm.validate_unique
(self)
Call the instance's validate_unique() method and update the form's validation errors if any were raised.
Call the instance's validate_unique() method and update the form's validation errors if any were raised.
def validate_unique(self): """ Call the instance's validate_unique() method and update the form's validation errors if any were raised. """ exclude = self._get_validation_exclusions() try: self.instance.validate_unique(exclude=exclude) except ValidationError as e: self._update_errors(e)
[ "def", "validate_unique", "(", "self", ")", ":", "exclude", "=", "self", ".", "_get_validation_exclusions", "(", ")", "try", ":", "self", ".", "instance", ".", "validate_unique", "(", "exclude", "=", "exclude", ")", "except", "ValidationError", "as", "e", ":", "self", ".", "_update_errors", "(", "e", ")" ]
[ 420, 4 ]
[ 429, 34 ]
python
en
['en', 'error', 'th']
False
BaseModelForm._save_m2m
(self)
Save the many-to-many fields and generic relations for this form.
Save the many-to-many fields and generic relations for this form.
def _save_m2m(self): """ Save the many-to-many fields and generic relations for this form. """ cleaned_data = self.cleaned_data exclude = self._meta.exclude fields = self._meta.fields opts = self.instance._meta # Note that for historical reasons we want to include also # private_fields here. (GenericRelation was previously a fake # m2m field). for f in chain(opts.many_to_many, opts.private_fields): if not hasattr(f, 'save_form_data'): continue if fields and f.name not in fields: continue if exclude and f.name in exclude: continue if f.name in cleaned_data: f.save_form_data(self.instance, cleaned_data[f.name])
[ "def", "_save_m2m", "(", "self", ")", ":", "cleaned_data", "=", "self", ".", "cleaned_data", "exclude", "=", "self", ".", "_meta", ".", "exclude", "fields", "=", "self", ".", "_meta", ".", "fields", "opts", "=", "self", ".", "instance", ".", "_meta", "# Note that for historical reasons we want to include also", "# private_fields here. (GenericRelation was previously a fake", "# m2m field).", "for", "f", "in", "chain", "(", "opts", ".", "many_to_many", ",", "opts", ".", "private_fields", ")", ":", "if", "not", "hasattr", "(", "f", ",", "'save_form_data'", ")", ":", "continue", "if", "fields", "and", "f", ".", "name", "not", "in", "fields", ":", "continue", "if", "exclude", "and", "f", ".", "name", "in", "exclude", ":", "continue", "if", "f", ".", "name", "in", "cleaned_data", ":", "f", ".", "save_form_data", "(", "self", ".", "instance", ",", "cleaned_data", "[", "f", ".", "name", "]", ")" ]
[ 431, 4 ]
[ 450, 69 ]
python
en
['en', 'error', 'th']
False
BaseModelForm.save
(self, commit=True)
Save this form's self.instance object if commit=True. Otherwise, add a save_m2m() method to the form which can be called after the instance is saved manually at a later time. Return the model instance.
Save this form's self.instance object if commit=True. Otherwise, add a save_m2m() method to the form which can be called after the instance is saved manually at a later time. Return the model instance.
def save(self, commit=True): """ Save this form's self.instance object if commit=True. Otherwise, add a save_m2m() method to the form which can be called after the instance is saved manually at a later time. Return the model instance. """ if self.errors: raise ValueError( "The %s could not be %s because the data didn't validate." % ( self.instance._meta.object_name, 'created' if self.instance._state.adding else 'changed', ) ) if commit: # If committing, save the instance and the m2m data immediately. self.instance.save() self._save_m2m() else: # If not committing, add a method to the form to allow deferred # saving of m2m data. self.save_m2m = self._save_m2m return self.instance
[ "def", "save", "(", "self", ",", "commit", "=", "True", ")", ":", "if", "self", ".", "errors", ":", "raise", "ValueError", "(", "\"The %s could not be %s because the data didn't validate.\"", "%", "(", "self", ".", "instance", ".", "_meta", ".", "object_name", ",", "'created'", "if", "self", ".", "instance", ".", "_state", ".", "adding", "else", "'changed'", ",", ")", ")", "if", "commit", ":", "# If committing, save the instance and the m2m data immediately.", "self", ".", "instance", ".", "save", "(", ")", "self", ".", "_save_m2m", "(", ")", "else", ":", "# If not committing, add a method to the form to allow deferred", "# saving of m2m data.", "self", ".", "save_m2m", "=", "self", ".", "_save_m2m", "return", "self", ".", "instance" ]
[ 452, 4 ]
[ 473, 28 ]
python
en
['en', 'error', 'th']
False
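A sketch of the ``commit=False`` contract described in this record: the instance comes back unsaved, and ``save_m2m()`` is attached to the form only in that case. ``ArticleForm`` and the ``author``/``request`` objects are hypothetical.

```python
from myapp.forms import ArticleForm  # hypothetical ModelForm with m2m fields

def create_article(request):
    form = ArticleForm(request.POST)
    if form.is_valid():
        article = form.save(commit=False)   # built from cleaned_data, not yet written
        article.author = request.user       # fill fields the form doesn't cover
        article.save()
        form.save_m2m()                     # only exists after save(commit=False)
        return article
```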
BaseModelFormSet.initial_form_count
(self)
Return the number of forms that are required in this FormSet.
Return the number of forms that are required in this FormSet.
def initial_form_count(self): """Return the number of forms that are required in this FormSet.""" if not self.is_bound: return len(self.get_queryset()) return super().initial_form_count()
[ "def", "initial_form_count", "(", "self", ")", ":", "if", "not", "self", ".", "is_bound", ":", "return", "len", "(", "self", ".", "get_queryset", "(", ")", ")", "return", "super", "(", ")", ".", "initial_form_count", "(", ")" ]
[ 582, 4 ]
[ 586, 43 ]
python
en
['en', 'en', 'en']
True
BaseModelFormSet._get_to_python
(self, field)
If the field is a related field, fetch the concrete field's (that is, the ultimate pointed-to field's) to_python.
If the field is a related field, fetch the concrete field's (that is, the ultimate pointed-to field's) to_python.
def _get_to_python(self, field): """ If the field is a related field, fetch the concrete field's (that is, the ultimate pointed-to field's) to_python. """ while field.remote_field is not None: field = field.remote_field.get_related_field() return field.to_python
[ "def", "_get_to_python", "(", "self", ",", "field", ")", ":", "while", "field", ".", "remote_field", "is", "not", "None", ":", "field", "=", "field", ".", "remote_field", ".", "get_related_field", "(", ")", "return", "field", ".", "to_python" ]
[ 593, 4 ]
[ 600, 30 ]
python
en
['en', 'error', 'th']
False
BaseModelFormSet.save_new
(self, form, commit=True)
Save and return a new model instance for the given form.
Save and return a new model instance for the given form.
def save_new(self, form, commit=True): """Save and return a new model instance for the given form.""" return form.save(commit=commit)
[ "def", "save_new", "(", "self", ",", "form", ",", "commit", "=", "True", ")", ":", "return", "form", ".", "save", "(", "commit", "=", "commit", ")" ]
[ 655, 4 ]
[ 657, 39 ]
python
en
['en', 'en', 'en']
True
BaseModelFormSet.save_existing
(self, form, instance, commit=True)
Save and return an existing model instance for the given form.
Save and return an existing model instance for the given form.
def save_existing(self, form, instance, commit=True): """Save and return an existing model instance for the given form.""" return form.save(commit=commit)
[ "def", "save_existing", "(", "self", ",", "form", ",", "instance", ",", "commit", "=", "True", ")", ":", "return", "form", ".", "save", "(", "commit", "=", "commit", ")" ]
[ 659, 4 ]
[ 661, 39 ]
python
en
['en', 'en', 'en']
True
BaseModelFormSet.delete_existing
(self, obj, commit=True)
Deletes an existing model instance.
Deletes an existing model instance.
def delete_existing(self, obj, commit=True): """Deletes an existing model instance.""" if commit: obj.delete()
[ "def", "delete_existing", "(", "self", ",", "obj", ",", "commit", "=", "True", ")", ":", "if", "commit", ":", "obj", ".", "delete", "(", ")" ]
[ 663, 4 ]
[ 666, 24 ]
python
en
['en', 'en', 'en']
True
BaseModelFormSet.save
(self, commit=True)
Save model instances for every form, adding and changing instances as necessary, and return the list of instances.
Save model instances for every form, adding and changing instances as necessary, and return the list of instances.
def save(self, commit=True): """ Save model instances for every form, adding and changing instances as necessary, and return the list of instances. """ if not commit: self.saved_forms = [] def save_m2m(): for form in self.saved_forms: form.save_m2m() self.save_m2m = save_m2m return self.save_existing_objects(commit) + self.save_new_objects(commit)
[ "def", "save", "(", "self", ",", "commit", "=", "True", ")", ":", "if", "not", "commit", ":", "self", ".", "saved_forms", "=", "[", "]", "def", "save_m2m", "(", ")", ":", "for", "form", "in", "self", ".", "saved_forms", ":", "form", ".", "save_m2m", "(", ")", "self", ".", "save_m2m", "=", "save_m2m", "return", "self", ".", "save_existing_objects", "(", "commit", ")", "+", "self", ".", "save_new_objects", "(", "commit", ")" ]
[ 668, 4 ]
[ 680, 81 ]
python
en
['en', 'error', 'th']
False
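A quick usage sketch of the deferred-save path described in this docstring. The Author model, the extra owner field, and the request handling are assumptions for illustration only, not part of the dataset row above.

from django.forms import modelformset_factory
from myapp.models import Author  # hypothetical example model

AuthorFormSet = modelformset_factory(Author, fields=["name"], extra=1)

def handle(request):
    formset = AuthorFormSet(request.POST)
    if formset.is_valid():
        # commit=False returns unsaved instances and attaches save_m2m()
        # to the formset, matching the branch shown in save() above.
        instances = formset.save(commit=False)
        for obj in instances:
            obj.owner = request.user  # hypothetical field the form doesn't cover
            obj.save()
        formset.save_m2m()  # persist the deferred many-to-many data
        return instances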
BaseModelFormSet.add_fields
(self, form, index)
Add a hidden field for the object's primary key.
Add a hidden field for the object's primary key.
def add_fields(self, form, index): """Add a hidden field for the object's primary key.""" from django.db.models import AutoField, ForeignKey, OneToOneField self._pk_field = pk = self.model._meta.pk # If a pk isn't editable, then it won't be on the form, so we need to # add it here so we can tell which object is which when we get the # data back. Generally, pk.editable should be false, but for some # reason, auto_created pk fields and AutoField's editable attribute is # True, so check for that as well. def pk_is_not_editable(pk): return ( (not pk.editable) or (pk.auto_created or isinstance(pk, AutoField)) or ( pk.remote_field and pk.remote_field.parent_link and pk_is_not_editable(pk.remote_field.model._meta.pk) ) ) if pk_is_not_editable(pk) or pk.name not in form.fields: if form.is_bound: # If we're adding the related instance, ignore its primary key # as it could be an auto-generated default which isn't actually # in the database. pk_value = None if form.instance._state.adding else form.instance.pk else: try: if index is not None: pk_value = self.get_queryset()[index].pk else: pk_value = None except IndexError: pk_value = None if isinstance(pk, (ForeignKey, OneToOneField)): qs = pk.remote_field.model._default_manager.get_queryset() else: qs = self.model._default_manager.get_queryset() qs = qs.using(form.instance._state.db) if form._meta.widgets: widget = form._meta.widgets.get(self._pk_field.name, HiddenInput) else: widget = HiddenInput form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=widget) super().add_fields(form, index)
[ "def", "add_fields", "(", "self", ",", "form", ",", "index", ")", ":", "from", "django", ".", "db", ".", "models", "import", "AutoField", ",", "ForeignKey", ",", "OneToOneField", "self", ".", "_pk_field", "=", "pk", "=", "self", ".", "model", ".", "_meta", ".", "pk", "# If a pk isn't editable, then it won't be on the form, so we need to", "# add it here so we can tell which object is which when we get the", "# data back. Generally, pk.editable should be false, but for some", "# reason, auto_created pk fields and AutoField's editable attribute is", "# True, so check for that as well.", "def", "pk_is_not_editable", "(", "pk", ")", ":", "return", "(", "(", "not", "pk", ".", "editable", ")", "or", "(", "pk", ".", "auto_created", "or", "isinstance", "(", "pk", ",", "AutoField", ")", ")", "or", "(", "pk", ".", "remote_field", "and", "pk", ".", "remote_field", ".", "parent_link", "and", "pk_is_not_editable", "(", "pk", ".", "remote_field", ".", "model", ".", "_meta", ".", "pk", ")", ")", ")", "if", "pk_is_not_editable", "(", "pk", ")", "or", "pk", ".", "name", "not", "in", "form", ".", "fields", ":", "if", "form", ".", "is_bound", ":", "# If we're adding the related instance, ignore its primary key", "# as it could be an auto-generated default which isn't actually", "# in the database.", "pk_value", "=", "None", "if", "form", ".", "instance", ".", "_state", ".", "adding", "else", "form", ".", "instance", ".", "pk", "else", ":", "try", ":", "if", "index", "is", "not", "None", ":", "pk_value", "=", "self", ".", "get_queryset", "(", ")", "[", "index", "]", ".", "pk", "else", ":", "pk_value", "=", "None", "except", "IndexError", ":", "pk_value", "=", "None", "if", "isinstance", "(", "pk", ",", "(", "ForeignKey", ",", "OneToOneField", ")", ")", ":", "qs", "=", "pk", ".", "remote_field", ".", "model", ".", "_default_manager", ".", "get_queryset", "(", ")", "else", ":", "qs", "=", "self", ".", "model", ".", "_default_manager", ".", "get_queryset", "(", ")", "qs", "=", "qs", ".", "using", "(", "form", ".", "instance", ".", "_state", ".", "db", ")", "if", "form", ".", "_meta", ".", "widgets", ":", "widget", "=", "form", ".", "_meta", ".", "widgets", ".", "get", "(", "self", ".", "_pk_field", ".", "name", ",", "HiddenInput", ")", "else", ":", "widget", "=", "HiddenInput", "form", ".", "fields", "[", "self", ".", "_pk_field", ".", "name", "]", "=", "ModelChoiceField", "(", "qs", ",", "initial", "=", "pk_value", ",", "required", "=", "False", ",", "widget", "=", "widget", ")", "super", "(", ")", ".", "add_fields", "(", "form", ",", "index", ")" ]
[ 823, 4 ]
[ 864, 39 ]
python
en
['en', 'en', 'en']
True
ModelChoiceField.get_limit_choices_to
(self)
Return ``limit_choices_to`` for this form field. If it is a callable, invoke it and return the result.
Return ``limit_choices_to`` for this form field.
def get_limit_choices_to(self): """ Return ``limit_choices_to`` for this form field. If it is a callable, invoke it and return the result. """ if callable(self.limit_choices_to): return self.limit_choices_to() return self.limit_choices_to
[ "def", "get_limit_choices_to", "(", "self", ")", ":", "if", "callable", "(", "self", ".", "limit_choices_to", ")", ":", "return", "self", ".", "limit_choices_to", "(", ")", "return", "self", ".", "limit_choices_to" ]
[ 1218, 4 ]
[ 1226, 36 ]
python
en
['en', 'error', 'th']
False
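A minimal sketch of how a callable ``limit_choices_to`` is resolved lazily by this method; the Author model and the staff filter are assumed examples.

from django import forms
from django.db.models import Q
from myapp.models import Author  # hypothetical example model

def staff_only():
    # Re-evaluated each time the restriction is needed.
    return Q(is_staff=True)

field = forms.ModelChoiceField(
    queryset=Author.objects.all(),
    limit_choices_to=staff_only,
)

# get_limit_choices_to() invokes the callable and returns the Q object,
# which Django's form machinery then applies to the field's queryset.
restriction = field.get_limit_choices_to()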
ModelChoiceField.label_from_instance
(self, obj)
Convert objects into strings and generate the labels for the choices presented by this object. Subclasses can override this method to customize the display of the choices.
Convert objects into strings and generate the labels for the choices presented by this object. Subclasses can override this method to customize the display of the choices.
def label_from_instance(self, obj): """ Convert objects into strings and generate the labels for the choices presented by this object. Subclasses can override this method to customize the display of the choices. """ return str(obj)
[ "def", "label_from_instance", "(", "self", ",", "obj", ")", ":", "return", "str", "(", "obj", ")" ]
[ 1246, 4 ]
[ 1252, 23 ]
python
en
['en', 'error', 'th']
False
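The customary override mentioned in the docstring, sketched with an assumed Author model: subclass ModelChoiceField and return whatever label you want for each object.

from django import forms
from myapp.models import Author  # hypothetical example model

class AuthorChoiceField(forms.ModelChoiceField):
    def label_from_instance(self, obj):
        # Render choices as "Last, First" instead of str(obj).
        return f"{obj.last_name}, {obj.first_name}"

class BookForm(forms.Form):
    author = AuthorChoiceField(queryset=Author.objects.all())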
ModelMultipleChoiceField._check_values
(self, value)
Given a list of possible PK values, return a QuerySet of the corresponding objects. Raise a ValidationError if a given value is invalid (not a valid PK, not in the queryset, etc.)
Given a list of possible PK values, return a QuerySet of the corresponding objects. Raise a ValidationError if a given value is invalid (not a valid PK, not in the queryset, etc.)
def _check_values(self, value): """ Given a list of possible PK values, return a QuerySet of the corresponding objects. Raise a ValidationError if a given value is invalid (not a valid PK, not in the queryset, etc.) """ key = self.to_field_name or 'pk' # deduplicate given values to avoid creating many querysets or # requiring the database backend deduplicate efficiently. try: value = frozenset(value) except TypeError: # list of lists isn't hashable, for example raise ValidationError( self.error_messages['invalid_list'], code='invalid_list', ) for pk in value: try: self.queryset.filter(**{key: pk}) except (ValueError, TypeError): raise ValidationError( self.error_messages['invalid_pk_value'], code='invalid_pk_value', params={'pk': pk}, ) qs = self.queryset.filter(**{'%s__in' % key: value}) pks = {str(getattr(o, key)) for o in qs} for val in value: if str(val) not in pks: raise ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': val}, ) return qs
[ "def", "_check_values", "(", "self", ",", "value", ")", ":", "key", "=", "self", ".", "to_field_name", "or", "'pk'", "# deduplicate given values to avoid creating many querysets or", "# requiring the database backend deduplicate efficiently.", "try", ":", "value", "=", "frozenset", "(", "value", ")", "except", "TypeError", ":", "# list of lists isn't hashable, for example", "raise", "ValidationError", "(", "self", ".", "error_messages", "[", "'invalid_list'", "]", ",", "code", "=", "'invalid_list'", ",", ")", "for", "pk", "in", "value", ":", "try", ":", "self", ".", "queryset", ".", "filter", "(", "*", "*", "{", "key", ":", "pk", "}", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "raise", "ValidationError", "(", "self", ".", "error_messages", "[", "'invalid_pk_value'", "]", ",", "code", "=", "'invalid_pk_value'", ",", "params", "=", "{", "'pk'", ":", "pk", "}", ",", ")", "qs", "=", "self", ".", "queryset", ".", "filter", "(", "*", "*", "{", "'%s__in'", "%", "key", ":", "value", "}", ")", "pks", "=", "{", "str", "(", "getattr", "(", "o", ",", "key", ")", ")", "for", "o", "in", "qs", "}", "for", "val", "in", "value", ":", "if", "str", "(", "val", ")", "not", "in", "pks", ":", "raise", "ValidationError", "(", "self", ".", "error_messages", "[", "'invalid_choice'", "]", ",", "code", "=", "'invalid_choice'", ",", "params", "=", "{", "'value'", ":", "val", "}", ",", ")", "return", "qs" ]
[ 1345, 4 ]
[ 1380, 17 ]
python
en
['en', 'error', 'th']
False
BaseRequest.url_charset
(self)
The charset that is assumed for URLs. Defaults to the value of :attr:`charset`. .. versionadded:: 0.6
The charset that is assumed for URLs. Defaults to the value of :attr:`charset`.
def url_charset(self): """The charset that is assumed for URLs. Defaults to the value of :attr:`charset`. .. versionadded:: 0.6 """ return self.charset
[ "def", "url_charset", "(", "self", ")", ":", "return", "self", ".", "charset" ]
[ 169, 4 ]
[ 175, 27 ]
python
en
['en', 'en', 'en']
True
BaseRequest.from_values
(cls, *args, **kwargs)
Create a new request object based on the values provided. If environ is given, missing values are filled from there. This method is useful for small scripts when you need to simulate a request from a URL. Do not use this method for unit testing; there is a full-featured client object (:class:`Client`) that allows creating multipart requests, supports cookies, etc. This accepts the same options as the :class:`~werkzeug.test.EnvironBuilder`. .. versionchanged:: 0.5 This method now accepts the same arguments as :class:`~werkzeug.test.EnvironBuilder`. Because of this the `environ` parameter is now called `environ_overrides`. :return: request object
Create a new request object based on the values provided. If environ is given, missing values are filled from there. This method is useful for small scripts when you need to simulate a request from a URL. Do not use this method for unit testing; there is a full-featured client object (:class:`Client`) that allows creating multipart requests, supports cookies, etc.
def from_values(cls, *args, **kwargs): """Create a new request object based on the values provided. If environ is given missing values are filled from there. This method is useful for small scripts when you need to simulate a request from an URL. Do not use this method for unittesting, there is a full featured client object (:class:`Client`) that allows to create multipart requests, support for cookies etc. This accepts the same options as the :class:`~werkzeug.test.EnvironBuilder`. .. versionchanged:: 0.5 This method now accepts the same arguments as :class:`~werkzeug.test.EnvironBuilder`. Because of this the `environ` parameter is now called `environ_overrides`. :return: request object """ from ..test import EnvironBuilder charset = kwargs.pop("charset", cls.charset) kwargs["charset"] = charset builder = EnvironBuilder(*args, **kwargs) try: return builder.get_request(cls) finally: builder.close()
[ "def", "from_values", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", ".", ".", "test", "import", "EnvironBuilder", "charset", "=", "kwargs", ".", "pop", "(", "\"charset\"", ",", "cls", ".", "charset", ")", "kwargs", "[", "\"charset\"", "]", "=", "charset", "builder", "=", "EnvironBuilder", "(", "*", "args", ",", "*", "*", "kwargs", ")", "try", ":", "return", "builder", ".", "get_request", "(", "cls", ")", "finally", ":", "builder", ".", "close", "(", ")" ]
[ 178, 4 ]
[ 204, 27 ]
python
en
['en', 'en', 'en']
True
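A small sketch of simulating a request as described above; the path, method and payload are arbitrary example values.

from werkzeug.wrappers import Request

request = Request.from_values(
    "/search?q=werkzeug",
    method="POST",
    data={"page": "2"},
    headers={"X-Example": "1"},
)

assert request.method == "POST"
assert request.args["q"] == "werkzeug"
assert request.form["page"] == "2"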
BaseRequest.application
(cls, f)
Decorate a function as responder that accepts the request as the last argument. This works like the :func:`responder` decorator but the function is passed the request object as the last argument and the request object will be closed automatically:: @Request.application def my_wsgi_app(request): return Response('Hello World!') As of Werkzeug 0.14 HTTP exceptions are automatically caught and converted to responses instead of failing. :param f: the WSGI callable to decorate :return: a new WSGI callable
Decorate a function as responder that accepts the request as the last argument. This works like the :func:`responder` decorator but the function is passed the request object as the last argument and the request object will be closed automatically::
def application(cls, f): """Decorate a function as responder that accepts the request as the last argument. This works like the :func:`responder` decorator but the function is passed the request object as the last argument and the request object will be closed automatically:: @Request.application def my_wsgi_app(request): return Response('Hello World!') As of Werkzeug 0.14 HTTP exceptions are automatically caught and converted to responses instead of failing. :param f: the WSGI callable to decorate :return: a new WSGI callable """ #: return a callable that wraps the -2nd argument with the request #: and calls the function with all the arguments up to that one and #: the request. The return value is then called with the latest #: two arguments. This makes it possible to use this decorator for #: both standalone WSGI functions as well as bound methods and #: partially applied functions. from ..exceptions import HTTPException def application(*args): request = cls(args[-2]) with request: try: resp = f(*args[:-2] + (request,)) except HTTPException as e: resp = e.get_response(args[-2]) return resp(*args[-2:]) return update_wrapper(application, f)
[ "def", "application", "(", "cls", ",", "f", ")", ":", "#: return a callable that wraps the -2nd argument with the request", "#: and calls the function with all the arguments up to that one and", "#: the request. The return value is then called with the latest", "#: two arguments. This makes it possible to use this decorator for", "#: both standalone WSGI functions as well as bound methods and", "#: partially applied functions.", "from", ".", ".", "exceptions", "import", "HTTPException", "def", "application", "(", "*", "args", ")", ":", "request", "=", "cls", "(", "args", "[", "-", "2", "]", ")", "with", "request", ":", "try", ":", "resp", "=", "f", "(", "*", "args", "[", ":", "-", "2", "]", "+", "(", "request", ",", ")", ")", "except", "HTTPException", "as", "e", ":", "resp", "=", "e", ".", "get_response", "(", "args", "[", "-", "2", "]", ")", "return", "resp", "(", "*", "args", "[", "-", "2", ":", "]", ")", "return", "update_wrapper", "(", "application", ",", "f", ")" ]
[ 207, 4 ]
[ 241, 45 ]
python
en
['en', 'en', 'en']
True
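A runnable sketch of the decorator usage shown in the docstring; the greeting logic is an arbitrary example.

from werkzeug.wrappers import Request, Response

@Request.application
def my_wsgi_app(request):
    name = request.args.get("name", "World")
    return Response(f"Hello {name}!")

# The result is a plain WSGI callable, so it could be served with, e.g.:
#   from werkzeug.serving import run_simple
#   run_simple("localhost", 5000, my_wsgi_app)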
BaseRequest._get_file_stream
( self, total_content_length, content_type, filename=None, content_length=None )
Called to get a stream for the file upload. This must provide a file-like class with `read()`, `readline()` and `seek()` methods that is both writeable and readable. The default implementation returns a temporary file if the total content length is higher than 500KB. Because many browsers do not provide a content length for the files only the total content length matters. :param total_content_length: the total content length of all the data in the request combined. This value is guaranteed to be there. :param content_type: the mimetype of the uploaded file. :param filename: the filename of the uploaded file. May be `None`. :param content_length: the length of this file. This value is usually not provided because webbrowsers do not provide this value.
Called to get a stream for the file upload.
def _get_file_stream( self, total_content_length, content_type, filename=None, content_length=None ): """Called to get a stream for the file upload. This must provide a file-like class with `read()`, `readline()` and `seek()` methods that is both writeable and readable. The default implementation returns a temporary file if the total content length is higher than 500KB. Because many browsers do not provide a content length for the files only the total content length matters. :param total_content_length: the total content length of all the data in the request combined. This value is guaranteed to be there. :param content_type: the mimetype of the uploaded file. :param filename: the filename of the uploaded file. May be `None`. :param content_length: the length of this file. This value is usually not provided because webbrowsers do not provide this value. """ return default_stream_factory( total_content_length=total_content_length, filename=filename, content_type=content_type, content_length=content_length, )
[ "def", "_get_file_stream", "(", "self", ",", "total_content_length", ",", "content_type", ",", "filename", "=", "None", ",", "content_length", "=", "None", ")", ":", "return", "default_stream_factory", "(", "total_content_length", "=", "total_content_length", ",", "filename", "=", "filename", ",", "content_type", "=", "content_type", ",", "content_length", "=", "content_length", ",", ")" ]
[ 243, 4 ]
[ 270, 9 ]
python
en
['en', 'en', 'en']
True
BaseRequest.want_form_data_parsed
(self)
Returns True if the request method carries content. As of Werkzeug 0.9 this will be the case if a content type is transmitted. .. versionadded:: 0.8
Returns True if the request method carries content. As of Werkzeug 0.9 this will be the case if a content type is transmitted.
def want_form_data_parsed(self): """Returns True if the request method carries content. As of Werkzeug 0.9 this will be the case if a content type is transmitted. .. versionadded:: 0.8 """ return bool(self.environ.get("CONTENT_TYPE"))
[ "def", "want_form_data_parsed", "(", "self", ")", ":", "return", "bool", "(", "self", ".", "environ", ".", "get", "(", "\"CONTENT_TYPE\"", ")", ")" ]
[ 273, 4 ]
[ 279, 53 ]
python
en
['en', 'en', 'en']
True
BaseRequest.make_form_data_parser
(self)
Creates the form data parser. Instantiates the :attr:`form_data_parser_class` with some parameters. .. versionadded:: 0.8
Creates the form data parser. Instantiates the :attr:`form_data_parser_class` with some parameters.
def make_form_data_parser(self): """Creates the form data parser. Instantiates the :attr:`form_data_parser_class` with some parameters. .. versionadded:: 0.8 """ return self.form_data_parser_class( self._get_file_stream, self.charset, self.encoding_errors, self.max_form_memory_size, self.max_content_length, self.parameter_storage_class, )
[ "def", "make_form_data_parser", "(", "self", ")", ":", "return", "self", ".", "form_data_parser_class", "(", "self", ".", "_get_file_stream", ",", "self", ".", "charset", ",", "self", ".", "encoding_errors", ",", "self", ".", "max_form_memory_size", ",", "self", ".", "max_content_length", ",", "self", ".", "parameter_storage_class", ",", ")" ]
[ 281, 4 ]
[ 294, 9 ]
python
en
['en', 'en', 'en']
True
BaseRequest._load_form_data
(self)
Method used internally to retrieve submitted data. After calling this sets `form` and `files` on the request object to multi dicts filled with the incoming form data. As a matter of fact the input stream will be empty afterwards. You can also call this method to force the parsing of the form data. .. versionadded:: 0.8
Method used internally to retrieve submitted data. After calling this sets `form` and `files` on the request object to multi dicts filled with the incoming form data. As a matter of fact the input stream will be empty afterwards. You can also call this method to force the parsing of the form data.
def _load_form_data(self): """Method used internally to retrieve submitted data. After calling this sets `form` and `files` on the request object to multi dicts filled with the incoming form data. As a matter of fact the input stream will be empty afterwards. You can also call this method to force the parsing of the form data. .. versionadded:: 0.8 """ # abort early if we have already consumed the stream if "form" in self.__dict__: return _assert_not_shallow(self) if self.want_form_data_parsed: content_type = self.environ.get("CONTENT_TYPE", "") content_length = get_content_length(self.environ) mimetype, options = parse_options_header(content_type) parser = self.make_form_data_parser() data = parser.parse( self._get_stream_for_parsing(), mimetype, content_length, options ) else: data = ( self.stream, self.parameter_storage_class(), self.parameter_storage_class(), ) # inject the values into the instance dict so that we bypass # our cached_property non-data descriptor. d = self.__dict__ d["stream"], d["form"], d["files"] = data
[ "def", "_load_form_data", "(", "self", ")", ":", "# abort early if we have already consumed the stream", "if", "\"form\"", "in", "self", ".", "__dict__", ":", "return", "_assert_not_shallow", "(", "self", ")", "if", "self", ".", "want_form_data_parsed", ":", "content_type", "=", "self", ".", "environ", ".", "get", "(", "\"CONTENT_TYPE\"", ",", "\"\"", ")", "content_length", "=", "get_content_length", "(", "self", ".", "environ", ")", "mimetype", ",", "options", "=", "parse_options_header", "(", "content_type", ")", "parser", "=", "self", ".", "make_form_data_parser", "(", ")", "data", "=", "parser", ".", "parse", "(", "self", ".", "_get_stream_for_parsing", "(", ")", ",", "mimetype", ",", "content_length", ",", "options", ")", "else", ":", "data", "=", "(", "self", ".", "stream", ",", "self", ".", "parameter_storage_class", "(", ")", ",", "self", ".", "parameter_storage_class", "(", ")", ",", ")", "# inject the values into the instance dict so that we bypass", "# our cached_property non-data descriptor.", "d", "=", "self", ".", "__dict__", "d", "[", "\"stream\"", "]", ",", "d", "[", "\"form\"", "]", ",", "d", "[", "\"files\"", "]", "=", "data" ]
[ 296, 4 ]
[ 329, 49 ]
python
en
['en', 'en', 'en']
True
BaseRequest._get_stream_for_parsing
(self)
This is the same as accessing :attr:`stream` with the difference that if it finds cached data from calling :meth:`get_data` first it will create a new stream out of the cached data. .. versionadded:: 0.9.3
This is the same as accessing :attr:`stream` with the difference that if it finds cached data from calling :meth:`get_data` first it will create a new stream out of the cached data.
def _get_stream_for_parsing(self): """This is the same as accessing :attr:`stream` with the difference that if it finds cached data from calling :meth:`get_data` first it will create a new stream out of the cached data. .. versionadded:: 0.9.3 """ cached_data = getattr(self, "_cached_data", None) if cached_data is not None: return BytesIO(cached_data) return self.stream
[ "def", "_get_stream_for_parsing", "(", "self", ")", ":", "cached_data", "=", "getattr", "(", "self", ",", "\"_cached_data\"", ",", "None", ")", "if", "cached_data", "is", "not", "None", ":", "return", "BytesIO", "(", "cached_data", ")", "return", "self", ".", "stream" ]
[ 331, 4 ]
[ 341, 26 ]
python
en
['en', 'en', 'en']
True
BaseRequest.close
(self)
Closes associated resources of this request object. This closes all file handles explicitly. You can also use the request object in a with statement which will automatically close it. .. versionadded:: 0.9
Closes associated resources of this request object. This closes all file handles explicitly. You can also use the request object in a with statement which will automatically close it.
def close(self): """Closes associated resources of this request object. This closes all file handles explicitly. You can also use the request object in a with statement which will automatically close it. .. versionadded:: 0.9 """ files = self.__dict__.get("files") for _key, value in iter_multi_items(files or ()): value.close()
[ "def", "close", "(", "self", ")", ":", "files", "=", "self", ".", "__dict__", ".", "get", "(", "\"files\"", ")", "for", "_key", ",", "value", "in", "iter_multi_items", "(", "files", "or", "(", ")", ")", ":", "value", ".", "close", "(", ")" ]
[ 343, 4 ]
[ 352, 25 ]
python
en
['en', 'en', 'en']
True
BaseRequest.stream
(self)
If the incoming form data was not encoded with a known mimetype the data is stored unmodified in this stream for consumption. Most of the time it is a better idea to use :attr:`data` which will give you that data as a string. The stream only returns the data once. Unlike :attr:`input_stream` this stream is properly guarded that you can't accidentally read past the length of the input. Werkzeug will internally always refer to this stream to read data which makes it possible to wrap this object with a stream that does filtering. .. versionchanged:: 0.9 This stream is now always available but might be consumed by the form parser later on. Previously the stream was only set if no parsing happened.
If the incoming form data was not encoded with a known mimetype the data is stored unmodified in this stream for consumption. Most of the time it is a better idea to use :attr:`data` which will give you that data as a string. The stream only returns the data once.
def stream(self): """ If the incoming form data was not encoded with a known mimetype the data is stored unmodified in this stream for consumption. Most of the time it is a better idea to use :attr:`data` which will give you that data as a string. The stream only returns the data once. Unlike :attr:`input_stream` this stream is properly guarded that you can't accidentally read past the length of the input. Werkzeug will internally always refer to this stream to read data which makes it possible to wrap this object with a stream that does filtering. .. versionchanged:: 0.9 This stream is now always available but might be consumed by the form parser later on. Previously the stream was only set if no parsing happened. """ _assert_not_shallow(self) return get_input_stream(self.environ)
[ "def", "stream", "(", "self", ")", ":", "_assert_not_shallow", "(", "self", ")", "return", "get_input_stream", "(", "self", ".", "environ", ")" ]
[ 361, 4 ]
[ 379, 45 ]
python
en
['en', 'error', 'th']
False
BaseRequest.args
(self)
The parsed URL parameters (the part in the URL after the question mark). By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important.
The parsed URL parameters (the part in the URL after the question mark).
def args(self): """The parsed URL parameters (the part in the URL after the question mark). By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important. """ return url_decode( wsgi_get_bytes(self.environ.get("QUERY_STRING", "")), self.url_charset, errors=self.encoding_errors, cls=self.parameter_storage_class, )
[ "def", "args", "(", "self", ")", ":", "return", "url_decode", "(", "wsgi_get_bytes", "(", "self", ".", "environ", ".", "get", "(", "\"QUERY_STRING\"", ",", "\"\"", ")", ")", ",", "self", ".", "url_charset", ",", "errors", "=", "self", ".", "encoding_errors", ",", "cls", "=", "self", ".", "parameter_storage_class", ",", ")" ]
[ 391, 4 ]
[ 406, 9 ]
python
en
['en', 'en', 'en']
True
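A sketch of reading query parameters through this property; the URL is an arbitrary example.

from werkzeug.wrappers import Request

request = Request.from_values("/?tag=python&tag=wsgi&page=3")

request.args.get("page")             # '3' -- values arrive as strings
request.args.getlist("tag")          # ['python', 'wsgi'] -- the MultiDict keeps repeats
request.args.get("page", type=int)   # 3, converted on access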
BaseRequest.data
(self)
Contains the incoming request data as string in case it came with a mimetype Werkzeug does not handle.
Contains the incoming request data as string in case it came with a mimetype Werkzeug does not handle.
def data(self): """ Contains the incoming request data as string in case it came with a mimetype Werkzeug does not handle. """ if self.disable_data_descriptor: raise AttributeError("data descriptor is disabled") # XXX: this should eventually be deprecated. # We trigger form data parsing first which means that the descriptor # will not cache the data that would otherwise be .form or .files # data. This restores the behavior that was there in Werkzeug # before 0.9. New code should use :meth:`get_data` explicitly as # this will make behavior explicit. return self.get_data(parse_form_data=True)
[ "def", "data", "(", "self", ")", ":", "if", "self", ".", "disable_data_descriptor", ":", "raise", "AttributeError", "(", "\"data descriptor is disabled\"", ")", "# XXX: this should eventually be deprecated.", "# We trigger form data parsing first which means that the descriptor", "# will not cache the data that would otherwise be .form or .files", "# data. This restores the behavior that was there in Werkzeug", "# before 0.9. New code should use :meth:`get_data` explicitly as", "# this will make behavior explicit.", "return", "self", ".", "get_data", "(", "parse_form_data", "=", "True", ")" ]
[ 409, 4 ]
[ 424, 50 ]
python
en
['en', 'error', 'th']
False
BaseRequest.get_data
(self, cache=True, as_text=False, parse_form_data=False)
This reads the buffered incoming data from the client into one bytestring. By default this is cached but that behavior can be changed by setting `cache` to `False`. Usually it's a bad idea to call this method without checking the content length first as a client could send dozens of megabytes or more to cause memory problems on the server. Note that if the form data was already parsed this method will not return anything as form data parsing does not cache the data like this method does. To implicitly invoke the form data parsing function, set `parse_form_data` to `True`. When this is done the return value of this method will be an empty string if the form parser handles the data. This generally is not necessary as, if the whole data is cached (which is the default), the form parser will use the cached data to parse the form data. Please be generally aware of checking the content length first in any case before calling this method to avoid exhausting server memory. If `as_text` is set to `True` the return value will be a decoded unicode string. .. versionadded:: 0.9
This reads the buffered incoming data from the client into one bytestring. By default this is cached but that behavior can be changed by setting `cache` to `False`.
def get_data(self, cache=True, as_text=False, parse_form_data=False): """This reads the buffered incoming data from the client into one bytestring. By default this is cached but that behavior can be changed by setting `cache` to `False`. Usually it's a bad idea to call this method without checking the content length first as a client could send dozens of megabytes or more to cause memory problems on the server. Note that if the form data was already parsed this method will not return anything as form data parsing does not cache the data like this method does. To implicitly invoke form data parsing function set `parse_form_data` to `True`. When this is done the return value of this method will be an empty string if the form parser handles the data. This generally is not necessary as if the whole data is cached (which is the default) the form parser will used the cached data to parse the form data. Please be generally aware of checking the content length first in any case before calling this method to avoid exhausting server memory. If `as_text` is set to `True` the return value will be a decoded unicode string. .. versionadded:: 0.9 """ rv = getattr(self, "_cached_data", None) if rv is None: if parse_form_data: self._load_form_data() rv = self.stream.read() if cache: self._cached_data = rv if as_text: rv = rv.decode(self.charset, self.encoding_errors) return rv
[ "def", "get_data", "(", "self", ",", "cache", "=", "True", ",", "as_text", "=", "False", ",", "parse_form_data", "=", "False", ")", ":", "rv", "=", "getattr", "(", "self", ",", "\"_cached_data\"", ",", "None", ")", "if", "rv", "is", "None", ":", "if", "parse_form_data", ":", "self", ".", "_load_form_data", "(", ")", "rv", "=", "self", ".", "stream", ".", "read", "(", ")", "if", "cache", ":", "self", ".", "_cached_data", "=", "rv", "if", "as_text", ":", "rv", "=", "rv", ".", "decode", "(", "self", ".", "charset", ",", "self", ".", "encoding_errors", ")", "return", "rv" ]
[ 426, 4 ]
[ 460, 17 ]
python
en
['en', 'en', 'en']
True
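A sketch of the caching and decoding behaviour described above, using an arbitrary JSON body.

from werkzeug.wrappers import Request

request = Request.from_values(
    "/hook",
    method="POST",
    data=b'{"event": "push"}',
    content_type="application/json",
)

raw = request.get_data()               # bytes; cached on the request object
text = request.get_data(as_text=True)  # decoded with request.charset
assert raw == b'{"event": "push"}'
assert text == '{"event": "push"}'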
BaseRequest.form
(self)
The form parameters. By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important. Please keep in mind that file uploads will not end up here, but instead in the :attr:`files` attribute. .. versionchanged:: 0.9 Previous to Werkzeug 0.9 this would only contain form data for POST and PUT requests.
The form parameters. By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important.
def form(self): """The form parameters. By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important. Please keep in mind that file uploads will not end up here, but instead in the :attr:`files` attribute. .. versionchanged:: 0.9 Previous to Werkzeug 0.9 this would only contain form data for POST and PUT requests. """ self._load_form_data() return self.form
[ "def", "form", "(", "self", ")", ":", "self", ".", "_load_form_data", "(", ")", "return", "self", ".", "form" ]
[ 463, 4 ]
[ 479, 24 ]
python
en
['en', 'en', 'en']
True
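A sketch of form access; the field names and values are arbitrary examples.

from werkzeug.wrappers import Request

request = Request.from_values(
    "/login",
    method="POST",
    data={"username": "alice", "password": "secret"},
)

request.form["username"]            # 'alice'
request.form.get("remember_me")     # None -- missing keys don't raise with .get()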
BaseRequest.values
(self)
A :class:`werkzeug.datastructures.CombinedMultiDict` that combines :attr:`args` and :attr:`form`.
A :class:`werkzeug.datastructures.CombinedMultiDict` that combines :attr:`args` and :attr:`form`.
def values(self): """A :class:`werkzeug.datastructures.CombinedMultiDict` that combines :attr:`args` and :attr:`form`.""" args = [] for d in self.args, self.form: if not isinstance(d, MultiDict): d = MultiDict(d) args.append(d) return CombinedMultiDict(args)
[ "def", "values", "(", "self", ")", ":", "args", "=", "[", "]", "for", "d", "in", "self", ".", "args", ",", "self", ".", "form", ":", "if", "not", "isinstance", "(", "d", ",", "MultiDict", ")", ":", "d", "=", "MultiDict", "(", "d", ")", "args", ".", "append", "(", "d", ")", "return", "CombinedMultiDict", "(", "args", ")" ]
[ 482, 4 ]
[ 490, 38 ]
python
en
['en', 'fr', 'en']
True
BaseRequest.files
(self)
:class:`~werkzeug.datastructures.MultiDict` object containing all uploaded files. Each key in :attr:`files` is the name from the ``<input type="file" name="">``. Each value in :attr:`files` is a Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. It basically behaves like a standard file object you know from Python, with the difference that it also has a :meth:`~werkzeug.datastructures.FileStorage.save` function that can store the file on the filesystem. Note that :attr:`files` will only contain data if the request method was POST, PUT or PATCH and the ``<form>`` that posted to the request had ``enctype="multipart/form-data"``. It will be empty otherwise. See the :class:`~werkzeug.datastructures.MultiDict` / :class:`~werkzeug.datastructures.FileStorage` documentation for more details about the used data structure.
:class:`~werkzeug.datastructures.MultiDict` object containing all uploaded files. Each key in :attr:`files` is the name from the ``<input type="file" name="">``. Each value in :attr:`files` is a Werkzeug :class:`~werkzeug.datastructures.FileStorage` object.
def files(self): """:class:`~werkzeug.datastructures.MultiDict` object containing all uploaded files. Each key in :attr:`files` is the name from the ``<input type="file" name="">``. Each value in :attr:`files` is a Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. It basically behaves like a standard file object you know from Python, with the difference that it also has a :meth:`~werkzeug.datastructures.FileStorage.save` function that can store the file on the filesystem. Note that :attr:`files` will only contain data if the request method was POST, PUT or PATCH and the ``<form>`` that posted to the request had ``enctype="multipart/form-data"``. It will be empty otherwise. See the :class:`~werkzeug.datastructures.MultiDict` / :class:`~werkzeug.datastructures.FileStorage` documentation for more details about the used data structure. """ self._load_form_data() return self.files
[ "def", "files", "(", "self", ")", ":", "self", ".", "_load_form_data", "(", ")", "return", "self", ".", "files" ]
[ 493, 4 ]
[ 513, 25 ]
python
de
['de', 'en', 'de']
True
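A sketch of a multipart upload ending up in :attr:`files`; the field name, filename and bytes are fabricated examples.

from io import BytesIO
from werkzeug.wrappers import Request

request = Request.from_values(
    "/upload",
    method="POST",
    data={"avatar": (BytesIO(b"fake image bytes"), "avatar.png")},
)

uploaded = request.files["avatar"]   # a FileStorage instance
uploaded.filename                    # 'avatar.png'
# uploaded.save("/tmp/avatar.png")   # would write the upload to disk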
BaseRequest.cookies
(self)
A :class:`dict` with the contents of all cookies transmitted with the request.
A :class:`dict` with the contents of all cookies transmitted with the request.
def cookies(self): """A :class:`dict` with the contents of all cookies transmitted with the request.""" return parse_cookie( self.environ, self.charset, self.encoding_errors, cls=self.dict_storage_class, )
[ "def", "cookies", "(", "self", ")", ":", "return", "parse_cookie", "(", "self", ".", "environ", ",", "self", ".", "charset", ",", "self", ".", "encoding_errors", ",", "cls", "=", "self", ".", "dict_storage_class", ",", ")" ]
[ 516, 4 ]
[ 524, 9 ]
python
en
['en', 'en', 'en']
True
BaseRequest.headers
(self)
The headers from the WSGI environ as immutable :class:`~werkzeug.datastructures.EnvironHeaders`.
The headers from the WSGI environ as immutable :class:`~werkzeug.datastructures.EnvironHeaders`.
def headers(self): """The headers from the WSGI environ as immutable :class:`~werkzeug.datastructures.EnvironHeaders`. """ return EnvironHeaders(self.environ)
[ "def", "headers", "(", "self", ")", ":", "return", "EnvironHeaders", "(", "self", ".", "environ", ")" ]
[ 527, 4 ]
[ 531, 43 ]
python
en
['en', 'en', 'en']
True
BaseRequest.path
(self)
Requested path as unicode. This works a bit like the regular path info in the WSGI environment but will always include a leading slash, even if the URL root is accessed.
Requested path as unicode. This works a bit like the regular path info in the WSGI environment but will always include a leading slash, even if the URL root is accessed.
def path(self): """Requested path as unicode. This works a bit like the regular path info in the WSGI environment but will always include a leading slash, even if the URL root is accessed. """ raw_path = wsgi_decoding_dance( self.environ.get("PATH_INFO") or "", self.charset, self.encoding_errors ) return "/" + raw_path.lstrip("/")
[ "def", "path", "(", "self", ")", ":", "raw_path", "=", "wsgi_decoding_dance", "(", "self", ".", "environ", ".", "get", "(", "\"PATH_INFO\"", ")", "or", "\"\"", ",", "self", ".", "charset", ",", "self", ".", "encoding_errors", ")", "return", "\"/\"", "+", "raw_path", ".", "lstrip", "(", "\"/\"", ")" ]
[ 534, 4 ]
[ 542, 41 ]
python
en
['en', 'en', 'en']
True
BaseRequest.full_path
(self)
Requested path as unicode, including the query string.
Requested path as unicode, including the query string.
def full_path(self): """Requested path as unicode, including the query string.""" return self.path + u"?" + to_unicode(self.query_string, self.url_charset)
[ "def", "full_path", "(", "self", ")", ":", "return", "self", ".", "path", "+", "u\"?\"", "+", "to_unicode", "(", "self", ".", "query_string", ",", "self", ".", "url_charset", ")" ]
[ 545, 4 ]
[ 547, 81 ]
python
en
['en', 'en', 'en']
True
BaseRequest.script_root
(self)
The root path of the script without the trailing slash.
The root path of the script without the trailing slash.
def script_root(self): """The root path of the script without the trailing slash.""" raw_path = wsgi_decoding_dance( self.environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors ) return raw_path.rstrip("/")
[ "def", "script_root", "(", "self", ")", ":", "raw_path", "=", "wsgi_decoding_dance", "(", "self", ".", "environ", ".", "get", "(", "\"SCRIPT_NAME\"", ")", "or", "\"\"", ",", "self", ".", "charset", ",", "self", ".", "encoding_errors", ")", "return", "raw_path", ".", "rstrip", "(", "\"/\"", ")" ]
[ 550, 4 ]
[ 555, 35 ]
python
en
['en', 'en', 'en']
True
BaseRequest.url
(self)
The reconstructed current URL as IRI. See also: :attr:`trusted_hosts`.
The reconstructed current URL as IRI. See also: :attr:`trusted_hosts`.
def url(self): """The reconstructed current URL as IRI. See also: :attr:`trusted_hosts`. """ return get_current_url(self.environ, trusted_hosts=self.trusted_hosts)
[ "def", "url", "(", "self", ")", ":", "return", "get_current_url", "(", "self", ".", "environ", ",", "trusted_hosts", "=", "self", ".", "trusted_hosts", ")" ]
[ 558, 4 ]
[ 562, 78 ]
python
en
['en', 'en', 'en']
True
BaseRequest.base_url
(self)
Like :attr:`url` but without the query string. See also: :attr:`trusted_hosts`.
Like :attr:`url` but without the query string. See also: :attr:`trusted_hosts`.
def base_url(self): """Like :attr:`url` but without the querystring See also: :attr:`trusted_hosts`. """ return get_current_url( self.environ, strip_querystring=True, trusted_hosts=self.trusted_hosts )
[ "def", "base_url", "(", "self", ")", ":", "return", "get_current_url", "(", "self", ".", "environ", ",", "strip_querystring", "=", "True", ",", "trusted_hosts", "=", "self", ".", "trusted_hosts", ")" ]
[ 565, 4 ]
[ 571, 9 ]
python
en
['en', 'en', 'en']
True
BaseRequest.url_root
(self)
The full URL root (with hostname); this is the application root as IRI. See also: :attr:`trusted_hosts`.
The full URL root (with hostname); this is the application root as IRI. See also: :attr:`trusted_hosts`.
def url_root(self): """The full URL root (with hostname), this is the application root as IRI. See also: :attr:`trusted_hosts`. """ return get_current_url(self.environ, True, trusted_hosts=self.trusted_hosts)
[ "def", "url_root", "(", "self", ")", ":", "return", "get_current_url", "(", "self", ".", "environ", ",", "True", ",", "trusted_hosts", "=", "self", ".", "trusted_hosts", ")" ]
[ 574, 4 ]
[ 579, 84 ]
python
en
['en', 'en', 'en']
True
BaseRequest.host_url
(self)
Just the host with scheme as IRI. See also: :attr:`trusted_hosts`.
Just the host with scheme as IRI. See also: :attr:`trusted_hosts`.
def host_url(self): """Just the host with scheme as IRI. See also: :attr:`trusted_hosts`. """ return get_current_url( self.environ, host_only=True, trusted_hosts=self.trusted_hosts )
[ "def", "host_url", "(", "self", ")", ":", "return", "get_current_url", "(", "self", ".", "environ", ",", "host_only", "=", "True", ",", "trusted_hosts", "=", "self", ".", "trusted_hosts", ")" ]
[ 582, 4 ]
[ 588, 9 ]
python
en
['en', 'en', 'en']
True
BaseRequest.host
(self)
Just the host including the port if available. See also: :attr:`trusted_hosts`.
Just the host including the port if available. See also: :attr:`trusted_hosts`.
def host(self): """Just the host including the port if available. See also: :attr:`trusted_hosts`. """ return get_host(self.environ, trusted_hosts=self.trusted_hosts)
[ "def", "host", "(", "self", ")", ":", "return", "get_host", "(", "self", ".", "environ", ",", "trusted_hosts", "=", "self", ".", "trusted_hosts", ")" ]
[ 591, 4 ]
[ 595, 71 ]
python
en
['en', 'en', 'en']
True
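One illustrative environ showing how the URL-related properties above decompose a request; the host and paths are arbitrary.

from werkzeug.wrappers import Request

request = Request.from_values(
    "/blog/post?draft=1",
    base_url="https://example.com/app",
)

request.path         # '/blog/post'
request.full_path    # '/blog/post?draft=1'
request.script_root  # '/app'
request.url          # 'https://example.com/app/blog/post?draft=1'
request.base_url     # 'https://example.com/app/blog/post'
request.url_root     # 'https://example.com/app/'
request.host_url     # 'https://example.com/'
request.host         # 'example.com'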
BaseRequest.access_route
(self)
If a forwarded header exists, this is a list of all IP addresses from the client IP to the last proxy server.
If a forwarded header exists, this is a list of all IP addresses from the client IP to the last proxy server.
def access_route(self): """If a forwarded header exists this is a list of all ip addresses from the client ip to the last proxy server. """ if "HTTP_X_FORWARDED_FOR" in self.environ: addr = self.environ["HTTP_X_FORWARDED_FOR"].split(",") return self.list_storage_class([x.strip() for x in addr]) elif "REMOTE_ADDR" in self.environ: return self.list_storage_class([self.environ["REMOTE_ADDR"]]) return self.list_storage_class()
[ "def", "access_route", "(", "self", ")", ":", "if", "\"HTTP_X_FORWARDED_FOR\"", "in", "self", ".", "environ", ":", "addr", "=", "self", ".", "environ", "[", "\"HTTP_X_FORWARDED_FOR\"", "]", ".", "split", "(", "\",\"", ")", "return", "self", ".", "list_storage_class", "(", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "addr", "]", ")", "elif", "\"REMOTE_ADDR\"", "in", "self", ".", "environ", ":", "return", "self", ".", "list_storage_class", "(", "[", "self", ".", "environ", "[", "\"REMOTE_ADDR\"", "]", "]", ")", "return", "self", ".", "list_storage_class", "(", ")" ]
[ 613, 4 ]
[ 622, 40 ]
python
en
['en', 'en', 'en']
True
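A sketch of proxy-aware address inspection; the addresses are fabricated documentation-range examples, and the header should only be trusted when a known proxy sets it.

from werkzeug.wrappers import Request

request = Request.from_values(
    "/",
    headers={"X-Forwarded-For": "203.0.113.7, 10.0.0.2"},
    environ_overrides={"REMOTE_ADDR": "10.0.0.2"},
)

request.access_route   # ['203.0.113.7', '10.0.0.2']
request.remote_addr    # '10.0.0.2' -- the directly connected peer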
BaseRequest.remote_addr
(self)
The remote address of the client.
The remote address of the client.
def remote_addr(self): """The remote address of the client.""" return self.environ.get("REMOTE_ADDR")
[ "def", "remote_addr", "(", "self", ")", ":", "return", "self", ".", "environ", ".", "get", "(", "\"REMOTE_ADDR\"", ")" ]
[ 625, 4 ]
[ 627, 46 ]
python
en
['en', 'en', 'en']
True
BaseRequest.is_xhr
(self)
True if the request was triggered via a JavaScript XMLHttpRequest. This only works with libraries that support the ``X-Requested-With`` header and set it to "XMLHttpRequest". Libraries that do that are prototype, jQuery and Mochikit and probably some more. .. deprecated:: 0.13 ``X-Requested-With`` is not standard and is unreliable. You may be able to use :attr:`AcceptMixin.accept_mimetypes` instead.
True if the request was triggered via a JavaScript XMLHttpRequest. This only works with libraries that support the ``X-Requested-With`` header and set it to "XMLHttpRequest". Libraries that do that are prototype, jQuery and Mochikit and probably some more.
def is_xhr(self): """True if the request was triggered via a JavaScript XMLHttpRequest. This only works with libraries that support the ``X-Requested-With`` header and set it to "XMLHttpRequest". Libraries that do that are prototype, jQuery and Mochikit and probably some more. .. deprecated:: 0.13 ``X-Requested-With`` is not standard and is unreliable. You may be able to use :attr:`AcceptMixin.accept_mimetypes` instead. """ warnings.warn( "'Request.is_xhr' is deprecated as of version 0.13 and will" " be removed in version 1.0. The 'X-Requested-With' header" " is not standard and is unreliable. You may be able to use" " 'accept_mimetypes' instead.", DeprecationWarning, stacklevel=2, ) return self.environ.get("HTTP_X_REQUESTED_WITH", "").lower() == "xmlhttprequest"
[ "def", "is_xhr", "(", "self", ")", ":", "warnings", ".", "warn", "(", "\"'Request.is_xhr' is deprecated as of version 0.13 and will\"", "\" be removed in version 1.0. The 'X-Requested-With' header\"", "\" is not standard and is unreliable. You may be able to use\"", "\" 'accept_mimetypes' instead.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ",", ")", "return", "self", ".", "environ", ".", "get", "(", "\"HTTP_X_REQUESTED_WITH\"", ",", "\"\"", ")", ".", "lower", "(", ")", "==", "\"xmlhttprequest\"" ]
[ 645, 4 ]
[ 664, 88 ]
python
en
['en', 'en', 'en']
True
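As the deprecation note suggests, content negotiation is the sturdier signal. A rough sketch of that alternative (accept_mimetypes comes from the AcceptMixin that the full Request class mixes in):

from werkzeug.wrappers import Request

request = Request.from_values("/data", headers={"Accept": "application/json"})

wants_json = request.accept_mimetypes.best == "application/json"
# True here; a browser request preferring text/html would yield False.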
linux_distribution
(full_distribution_name=True)
Return information about the current OS distribution as a tuple ``(id_name, version, codename)`` with items as follows: * ``id_name``: If *full_distribution_name* is false, the result of :func:`distro.id`. Otherwise, the result of :func:`distro.name`. * ``version``: The result of :func:`distro.version`. * ``codename``: The result of :func:`distro.codename`. The interface of this function is compatible with the original :py:func:`platform.linux_distribution` function, supporting a subset of its parameters. The data it returns may not exactly be the same, because it uses more data sources than the original function, and that may lead to different data if the OS distribution is not consistent across multiple data sources it provides (there are indeed such distributions ...). Another reason for differences is the fact that the :func:`distro.id` method normalizes the distro ID string to a reliable machine-readable value for a number of popular OS distributions.
Return information about the current OS distribution as a tuple ``(id_name, version, codename)`` with items as follows:
def linux_distribution(full_distribution_name=True): """ Return information about the current OS distribution as a tuple ``(id_name, version, codename)`` with items as follows: * ``id_name``: If *full_distribution_name* is false, the result of :func:`distro.id`. Otherwise, the result of :func:`distro.name`. * ``version``: The result of :func:`distro.version`. * ``codename``: The result of :func:`distro.codename`. The interface of this function is compatible with the original :py:func:`platform.linux_distribution` function, supporting a subset of its parameters. The data it returns may not exactly be the same, because it uses more data sources than the original function, and that may lead to different data if the OS distribution is not consistent across multiple data sources it provides (there are indeed such distributions ...). Another reason for differences is the fact that the :func:`distro.id` method normalizes the distro ID string to a reliable machine-readable value for a number of popular OS distributions. """ return _distro.linux_distribution(full_distribution_name)
[ "def", "linux_distribution", "(", "full_distribution_name", "=", "True", ")", ":", "return", "_distro", ".", "linux_distribution", "(", "full_distribution_name", ")" ]
[ 99, 0 ]
[ 124, 61 ]
python
en
['en', 'error', 'th']
False
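An illustrative call on a hypothetical CentOS 7 host; the returned tuples depend entirely on the machine and are shown only as examples.

import distro

distro.linux_distribution(full_distribution_name=False)
# e.g. ('centos', '7.1.1503', 'Core')

distro.linux_distribution()
# e.g. ('CentOS Linux', '7.1.1503', 'Core')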
id
()
Return the distro ID of the current distribution, as a machine-readable string. For a number of OS distributions, the returned distro ID value is *reliable*, in the sense that it is documented and that it does not change across releases of the distribution. This package maintains the following reliable distro ID values: ============== ========================================= Distro ID Distribution ============== ========================================= "ubuntu" Ubuntu "debian" Debian "rhel" RedHat Enterprise Linux "centos" CentOS "fedora" Fedora "sles" SUSE Linux Enterprise Server "opensuse" openSUSE "amazon" Amazon Linux "arch" Arch Linux "cloudlinux" CloudLinux OS "exherbo" Exherbo Linux "gentoo" GenToo Linux "ibm_powerkvm" IBM PowerKVM "kvmibm" KVM for IBM z Systems "linuxmint" Linux Mint "mageia" Mageia "mandriva" Mandriva Linux "parallels" Parallels "pidora" Pidora "raspbian" Raspbian "oracle" Oracle Linux (and Oracle Enterprise Linux) "scientific" Scientific Linux "slackware" Slackware "xenserver" XenServer "openbsd" OpenBSD "netbsd" NetBSD "freebsd" FreeBSD "midnightbsd" MidnightBSD ============== ========================================= If you have a need to get distros for reliable IDs added into this set, or if you find that the :func:`distro.id` function returns a different distro ID for one of the listed distros, please create an issue in the `distro issue tracker`_. **Lookup hierarchy and transformations:** First, the ID is obtained from the following sources, in the specified order. The first available and non-empty value is used: * the value of the "ID" attribute of the os-release file, * the value of the "Distributor ID" attribute returned by the lsb_release command, * the first part of the file name of the distro release file, The so determined ID value then passes the following transformations, before it is returned by this method: * it is translated to lower case, * blanks (which should not be there anyway) are translated to underscores, * a normalization of the ID is performed, based upon `normalization tables`_. The purpose of this normalization is to ensure that the ID is as reliable as possible, even across incompatible changes in the OS distributions. A common reason for an incompatible change is the addition of an os-release file, or the addition of the lsb_release command, with ID values that differ from what was previously determined from the distro release file name.
Return the distro ID of the current distribution, as a machine-readable string.
def id(): """ Return the distro ID of the current distribution, as a machine-readable string. For a number of OS distributions, the returned distro ID value is *reliable*, in the sense that it is documented and that it does not change across releases of the distribution. This package maintains the following reliable distro ID values: ============== ========================================= Distro ID Distribution ============== ========================================= "ubuntu" Ubuntu "debian" Debian "rhel" RedHat Enterprise Linux "centos" CentOS "fedora" Fedora "sles" SUSE Linux Enterprise Server "opensuse" openSUSE "amazon" Amazon Linux "arch" Arch Linux "cloudlinux" CloudLinux OS "exherbo" Exherbo Linux "gentoo" GenToo Linux "ibm_powerkvm" IBM PowerKVM "kvmibm" KVM for IBM z Systems "linuxmint" Linux Mint "mageia" Mageia "mandriva" Mandriva Linux "parallels" Parallels "pidora" Pidora "raspbian" Raspbian "oracle" Oracle Linux (and Oracle Enterprise Linux) "scientific" Scientific Linux "slackware" Slackware "xenserver" XenServer "openbsd" OpenBSD "netbsd" NetBSD "freebsd" FreeBSD "midnightbsd" MidnightBSD ============== ========================================= If you have a need to get distros for reliable IDs added into this set, or if you find that the :func:`distro.id` function returns a different distro ID for one of the listed distros, please create an issue in the `distro issue tracker`_. **Lookup hierarchy and transformations:** First, the ID is obtained from the following sources, in the specified order. The first available and non-empty value is used: * the value of the "ID" attribute of the os-release file, * the value of the "Distributor ID" attribute returned by the lsb_release command, * the first part of the file name of the distro release file, The so determined ID value then passes the following transformations, before it is returned by this method: * it is translated to lower case, * blanks (which should not be there anyway) are translated to underscores, * a normalization of the ID is performed, based upon `normalization tables`_. The purpose of this normalization is to ensure that the ID is as reliable as possible, even across incompatible changes in the OS distributions. A common reason for an incompatible change is the addition of an os-release file, or the addition of the lsb_release command, with ID values that differ from what was previously determined from the distro release file name. """ return _distro.id()
[ "def", "id", "(", ")", ":", "return", "_distro", ".", "id", "(", ")" ]
[ 127, 0 ]
[ 203, 23 ]
python
en
['en', 'error', 'th']
False
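A tiny sketch of the normalized, machine-readable value this function promises; the outputs are hypothetical and depend on the host.

import distro

distro.id()   # e.g. 'ubuntu' on Ubuntu, 'rhel' on RedHat Enterprise Linux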
name
(pretty=False)
Return the name of the current OS distribution, as a human-readable string. If *pretty* is false, the name is returned without version or codename. (e.g. "CentOS Linux") If *pretty* is true, the version and codename are appended. (e.g. "CentOS Linux 7.1.1503 (Core)") **Lookup hierarchy:** The name is obtained from the following sources, in the specified order. The first available and non-empty value is used: * If *pretty* is false: - the value of the "NAME" attribute of the os-release file, - the value of the "Distributor ID" attribute returned by the lsb_release command, - the value of the "<name>" field of the distro release file. * If *pretty* is true: - the value of the "PRETTY_NAME" attribute of the os-release file, - the value of the "Description" attribute returned by the lsb_release command, - the value of the "<name>" field of the distro release file, appended with the value of the pretty version ("<version_id>" and "<codename>" fields) of the distro release file, if available.
Return the name of the current OS distribution, as a human-readable string.
def name(pretty=False): """ Return the name of the current OS distribution, as a human-readable string. If *pretty* is false, the name is returned without version or codename. (e.g. "CentOS Linux") If *pretty* is true, the version and codename are appended. (e.g. "CentOS Linux 7.1.1503 (Core)") **Lookup hierarchy:** The name is obtained from the following sources, in the specified order. The first available and non-empty value is used: * If *pretty* is false: - the value of the "NAME" attribute of the os-release file, - the value of the "Distributor ID" attribute returned by the lsb_release command, - the value of the "<name>" field of the distro release file. * If *pretty* is true: - the value of the "PRETTY_NAME" attribute of the os-release file, - the value of the "Description" attribute returned by the lsb_release command, - the value of the "<name>" field of the distro release file, appended with the value of the pretty version ("<version_id>" and "<codename>" fields) of the distro release file, if available. """ return _distro.name(pretty)
[ "def", "name", "(", "pretty", "=", "False", ")", ":", "return", "_distro", ".", "name", "(", "pretty", ")" ]
[ 206, 0 ]
[ 242, 31 ]
python
en
['en', 'error', 'th']
False
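A short sketch of the pretty parameter described in the row above, assuming distro is importable; the values in the comments are illustrative, not guaranteed outputs.

import distro

print(distro.name())             # e.g. "CentOS Linux" (illustrative)
print(distro.name(pretty=True))  # e.g. "CentOS Linux 7.1.1503 (Core)" (illustrative)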
version
(pretty=False, best=False)
Return the version of the current OS distribution, as a human-readable string. If *pretty* is false, the version is returned without codename (e.g. "7.0"). If *pretty* is true, the codename in parenthesis is appended, if the codename is non-empty (e.g. "7.0 (Maipo)"). Some distributions provide version numbers with different precisions in the different sources of distribution information. Examining the different sources in a fixed priority order does not always yield the most precise version (e.g. for Debian 8.2, or CentOS 7.1). The *best* parameter can be used to control the approach for the returned version: If *best* is false, the first non-empty version number in priority order of the examined sources is returned. If *best* is true, the most precise version number out of all examined sources is returned. **Lookup hierarchy:** In all cases, the version number is obtained from the following sources. If *best* is false, this order represents the priority order: * the value of the "VERSION_ID" attribute of the os-release file, * the value of the "Release" attribute returned by the lsb_release command, * the version number parsed from the "<version_id>" field of the first line of the distro release file, * the version number parsed from the "PRETTY_NAME" attribute of the os-release file, if it follows the format of the distro release files. * the version number parsed from the "Description" attribute returned by the lsb_release command, if it follows the format of the distro release files.
Return the version of the current OS distribution, as a human-readable string.
def version(pretty=False, best=False):
    """
    Return the version of the current OS distribution, as a human-readable
    string.

    If *pretty* is false, the version is returned without codename (e.g.
    "7.0").

    If *pretty* is true, the codename in parenthesis is appended, if the
    codename is non-empty (e.g. "7.0 (Maipo)").

    Some distributions provide version numbers with different precisions in
    the different sources of distribution information. Examining the
    different sources in a fixed priority order does not always yield the
    most precise version (e.g. for Debian 8.2, or CentOS 7.1).

    The *best* parameter can be used to control the approach for the returned
    version:

    If *best* is false, the first non-empty version number in priority order
    of the examined sources is returned.

    If *best* is true, the most precise version number out of all examined
    sources is returned.

    **Lookup hierarchy:**

    In all cases, the version number is obtained from the following sources.
    If *best* is false, this order represents the priority order:

    * the value of the "VERSION_ID" attribute of the os-release file,

    * the value of the "Release" attribute returned by the lsb_release
      command,

    * the version number parsed from the "<version_id>" field of the first
      line of the distro release file,

    * the version number parsed from the "PRETTY_NAME" attribute of the
      os-release file, if it follows the format of the distro release files.

    * the version number parsed from the "Description" attribute returned by
      the lsb_release command, if it follows the format of the distro release
      files.
    """
    return _distro.version(pretty, best)
[ "def", "version", "(", "pretty", "=", "False", ",", "best", "=", "False", ")", ":", "return", "_distro", ".", "version", "(", "pretty", ",", "best", ")" ]
[ 245, 0 ]
[ 286, 40 ]
python
en
['en', 'error', 'th']
False
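A sketch contrasting the default behaviour with best=True and pretty=True as documented in the row above; distro is assumed importable, and the commented outputs are illustrative (the Debian 8.2 case is taken from the docstring's own example).

import distro

# First non-empty version in priority order of the examined sources.
print(distro.version())             # e.g. "8" on a hypothetical Debian 8.2 host

# Most precise version across all examined sources.
print(distro.version(best=True))    # e.g. "8.2" on the same hypothetical host

# Codename appended in parentheses when it is non-empty.
print(distro.version(pretty=True))  # e.g. "8 (jessie)" (illustrative)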
version_parts
(best=False)
Return the version of the current OS distribution as a tuple ``(major, minor, build_number)`` with items as follows: * ``major``: The result of :func:`distro.major_version`. * ``minor``: The result of :func:`distro.minor_version`. * ``build_number``: The result of :func:`distro.build_number`. For a description of the *best* parameter, see the :func:`distro.version` method.
Return the version of the current OS distribution as a tuple ``(major, minor, build_number)`` with items as follows:
def version_parts(best=False):
    """
    Return the version of the current OS distribution as a tuple
    ``(major, minor, build_number)`` with items as follows:

    * ``major``: The result of :func:`distro.major_version`.

    * ``minor``: The result of :func:`distro.minor_version`.

    * ``build_number``: The result of :func:`distro.build_number`.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.version_parts(best)
[ "def", "version_parts", "(", "best", "=", "False", ")", ":", "return", "_distro", ".", "version_parts", "(", "best", ")" ]
[ 289, 0 ]
[ 303, 38 ]
python
en
['en', 'error', 'th']
False
major_version
(best=False)
Return the major version of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The major version is the first part of the dot-separated version string. For a description of the *best* parameter, see the :func:`distro.version` method.
Return the major version of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The major version is the first part of the dot-separated version string.
def major_version(best=False):
    """
    Return the major version of the current OS distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The major version is the first
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.major_version(best)
[ "def", "major_version", "(", "best", "=", "False", ")", ":", "return", "_distro", ".", "major_version", "(", "best", ")" ]
[ 306, 0 ]
[ 316, 38 ]
python
en
['en', 'error', 'th']
False
minor_version
(best=False)
Return the minor version of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The minor version is the second part of the dot-separated version string. For a description of the *best* parameter, see the :func:`distro.version` method.
Return the minor version of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The minor version is the second part of the dot-separated version string.
def minor_version(best=False):
    """
    Return the minor version of the current OS distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The minor version is the second
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.minor_version(best)
[ "def", "minor_version", "(", "best", "=", "False", ")", ":", "return", "_distro", ".", "minor_version", "(", "best", ")" ]
[ 319, 0 ]
[ 329, 38 ]
python
en
['en', 'error', 'th']
False
build_number
(best=False)
Return the build number of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The build number is the third part of the dot-separated version string. For a description of the *best* parameter, see the :func:`distro.version` method.
Return the build number of the current OS distribution, as a string, if provided. Otherwise, the empty string is returned. The build number is the third part of the dot-separated version string.
def build_number(best=False):
    """
    Return the build number of the current OS distribution, as a string,
    if provided.
    Otherwise, the empty string is returned. The build number is the third
    part of the dot-separated version string.

    For a description of the *best* parameter, see the :func:`distro.version`
    method.
    """
    return _distro.build_number(best)
[ "def", "build_number", "(", "best", "=", "False", ")", ":", "return", "_distro", ".", "build_number", "(", "best", ")" ]
[ 332, 0 ]
[ 342, 37 ]
python
en
['en', 'error', 'th']
False
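A combined sketch for version_parts() and the major_version(), minor_version() and build_number() helpers documented in the four rows above, assuming distro is importable; the tuple value in the comment is illustrative.

import distro

# A 3-tuple of strings, e.g. ('7', '1', '1503') on a hypothetical CentOS host.
major, minor, build = distro.version_parts()

# The three helpers return the same components individually; an empty string
# means that component is not provided by the distribution.
assert distro.major_version() == major
assert distro.minor_version() == minor
assert distro.build_number() == build
print(major, minor, build)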
like
()
Return a space-separated list of distro IDs of distributions that are closely related to the current OS distribution in regards to packaging and programming interfaces, for example distributions the current distribution is a derivative from. **Lookup hierarchy:** This information item is only provided by the os-release file. For details, see the description of the "ID_LIKE" attribute in the `os-release man page <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
Return a space-separated list of distro IDs of distributions that are closely related to the current OS distribution in regards to packaging and programming interfaces, for example distributions the current distribution is a derivative from.
def like():
    """
    Return a space-separated list of distro IDs of distributions that are
    closely related to the current OS distribution in regards to packaging
    and programming interfaces, for example distributions the current
    distribution is a derivative from.

    **Lookup hierarchy:**

    This information item is only provided by the os-release file.
    For details, see the description of the "ID_LIKE" attribute in the
    `os-release man page
    <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
    """
    return _distro.like()
[ "def", "like", "(", ")", ":", "return", "_distro", ".", "like", "(", ")" ]
[ 345, 0 ]
[ 359, 25 ]
python
en
['en', 'error', 'th']
False
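A sketch using like() to detect derivatives, as described in the row above; distro is assumed importable and the ID values in the comments are illustrative.

import distro

# like() returns a space-separated string of related distro IDs, e.g.
# "rhel fedora" on a hypothetical CentOS host, or "" when the os-release
# file provides no ID_LIKE attribute.
related = distro.like().split()
if "debian" in (distro.id(), *related):
    print("treat this host like Debian for packaging purposes")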
codename
()
Return the codename for the release of the current OS distribution, as a string. If the distribution does not have a codename, an empty string is returned. Note that the returned codename is not always really a codename. For example, openSUSE returns "x86_64". This function does not handle such cases in any special way and just returns the string it finds, if any. **Lookup hierarchy:** * the codename within the "VERSION" attribute of the os-release file, if provided, * the value of the "Codename" attribute returned by the lsb_release command, * the value of the "<codename>" field of the distro release file.
Return the codename for the release of the current OS distribution, as a string.
def codename():
    """
    Return the codename for the release of the current OS distribution,
    as a string.

    If the distribution does not have a codename, an empty string is
    returned.

    Note that the returned codename is not always really a codename. For
    example, openSUSE returns "x86_64". This function does not handle such
    cases in any special way and just returns the string it finds, if any.

    **Lookup hierarchy:**

    * the codename within the "VERSION" attribute of the os-release file, if
      provided,

    * the value of the "Codename" attribute returned by the lsb_release
      command,

    * the value of the "<codename>" field of the distro release file.
    """
    return _distro.codename()
[ "def", "codename", "(", ")", ":", "return", "_distro", ".", "codename", "(", ")" ]
[ 362, 0 ]
[ 383, 29 ]
python
en
['en', 'error', 'th']
False
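A sketch for codename(), assuming distro is importable; note the caveat in the row above that the returned string is not always a true codename, and the sample value is illustrative.

import distro

release_codename = distro.codename()
if release_codename:
    print("release codename:", release_codename)  # e.g. "Maipo" (illustrative)
else:
    print("this distribution does not provide a codename")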
info
(pretty=False, best=False)
Return certain machine-readable information items about the current OS distribution in a dictionary, as shown in the following example: .. sourcecode:: python { 'id': 'rhel', 'version': '7.0', 'version_parts': { 'major': '7', 'minor': '0', 'build_number': '' }, 'like': 'fedora', 'codename': 'Maipo' } The dictionary structure and keys are always the same, regardless of which information items are available in the underlying data sources. The values for the various keys are as follows: * ``id``: The result of :func:`distro.id`. * ``version``: The result of :func:`distro.version`. * ``version_parts -> major``: The result of :func:`distro.major_version`. * ``version_parts -> minor``: The result of :func:`distro.minor_version`. * ``version_parts -> build_number``: The result of :func:`distro.build_number`. * ``like``: The result of :func:`distro.like`. * ``codename``: The result of :func:`distro.codename`. For a description of the *pretty* and *best* parameters, see the :func:`distro.version` method.
Return certain machine-readable information items about the current OS distribution in a dictionary, as shown in the following example:
def info(pretty=False, best=False):
    """
    Return certain machine-readable information items about the current OS
    distribution in a dictionary, as shown in the following example:

    .. sourcecode:: python

        {
            'id': 'rhel',
            'version': '7.0',
            'version_parts': {
                'major': '7',
                'minor': '0',
                'build_number': ''
            },
            'like': 'fedora',
            'codename': 'Maipo'
        }

    The dictionary structure and keys are always the same, regardless of
    which information items are available in the underlying data sources.
    The values for the various keys are as follows:

    * ``id``: The result of :func:`distro.id`.

    * ``version``: The result of :func:`distro.version`.

    * ``version_parts -> major``: The result of :func:`distro.major_version`.

    * ``version_parts -> minor``: The result of :func:`distro.minor_version`.

    * ``version_parts -> build_number``: The result of
      :func:`distro.build_number`.

    * ``like``: The result of :func:`distro.like`.

    * ``codename``: The result of :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see the
    :func:`distro.version` method.
    """
    return _distro.info(pretty, best)
[ "def", "info", "(", "pretty", "=", "False", ",", "best", "=", "False", ")", ":", "return", "_distro", ".", "info", "(", "pretty", ",", "best", ")" ]
[ 386, 0 ]
[ 427, 37 ]
python
en
['en', 'error', 'th']
False
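A sketch that dumps the info() dictionary documented in the row above as JSON, assuming distro is importable; the keys are fixed as described, only the values are host-dependent.

import json
import distro

# Mirrors the docstring example: id, version, version_parts
# (major/minor/build_number), like and codename are always present.
print(json.dumps(distro.info(best=True), indent=2))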
os_release_info
()
Return a dictionary containing key-value pairs for the information items from the os-release file data source of the current OS distribution. See `os-release file`_ for details about these information items.
Return a dictionary containing key-value pairs for the information items from the os-release file data source of the current OS distribution.
def os_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current OS distribution.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_info()
[ "def", "os_release_info", "(", ")", ":", "return", "_distro", ".", "os_release_info", "(", ")" ]
[ 430, 0 ]
[ 437, 36 ]
python
en
['en', 'error', 'th']
False
lsb_release_info
()
Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the current OS distribution. See `lsb_release command output`_ for details about these information items.
Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the current OS distribution.
def lsb_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current OS distribution.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_info()
[ "def", "lsb_release_info", "(", ")", ":", "return", "_distro", ".", "lsb_release_info", "(", ")" ]
[ 440, 0 ]
[ 448, 37 ]
python
en
['en', 'error', 'th']
False
distro_release_info
()
Return a dictionary containing key-value pairs for the information items from the distro release file data source of the current OS distribution. See `distro release file`_ for details about these information items.
Return a dictionary containing key-value pairs for the information items from the distro release file data source of the current OS distribution.
def distro_release_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current OS distribution.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_info()
[ "def", "distro_release_info", "(", ")", ":", "return", "_distro", ".", "distro_release_info", "(", ")" ]
[ 451, 0 ]
[ 458, 40 ]
python
en
['en', 'error', 'th']
False
uname_info
()
Return a dictionary containing key-value pairs for the information items from the uname command data source of the current OS distribution.
Return a dictionary containing key-value pairs for the information items from the uname command data source of the current OS distribution.
def uname_info():
    """
    Return a dictionary containing key-value pairs for the information items
    from the uname command data source of the current OS distribution.
    """
    return _distro.uname_info()
[ "def", "uname_info", "(", ")", ":", "return", "_distro", ".", "uname_info", "(", ")" ]
[ 461, 0 ]
[ 466, 31 ]
python
en
['en', 'error', 'th']
False
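A sketch comparing the four raw data-source dictionaries documented in the rows above (os-release file, lsb_release command, distro release file, uname), assuming distro is importable; which dictionaries are non-empty depends on what the host actually provides.

import distro

sources = {
    "os-release file": distro.os_release_info(),
    "lsb_release command": distro.lsb_release_info(),
    "distro release file": distro.distro_release_info(),
    "uname": distro.uname_info(),
}
for label, data in sources.items():
    # Each value is a plain dict of key-value pairs; it is empty when the
    # corresponding data source is not available on the host.
    print(label, sorted(data))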
os_release_attr
(attribute)
Return a single named information item from the os-release file data source of the current OS distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. The empty string, if the item does not exist. See `os-release file`_ for details about these information items.
Return a single named information item from the os-release file data source of the current OS distribution.
def os_release_attr(attribute):
    """
    Return a single named information item from the os-release file data
    source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_attr(attribute)
[ "def", "os_release_attr", "(", "attribute", ")", ":", "return", "_distro", ".", "os_release_attr", "(", "attribute", ")" ]
[ 469, 0 ]
[ 485, 45 ]
python
en
['en', 'error', 'th']
False
lsb_release_attr
(attribute)
Return a single named information item from the lsb_release command output data source of the current OS distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. The empty string, if the item does not exist. See `lsb_release command output`_ for details about these information items.
Return a single named information item from the lsb_release command output data source of the current OS distribution.
def lsb_release_attr(attribute):
    """
    Return a single named information item from the lsb_release command
    output data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_attr(attribute)
[ "def", "lsb_release_attr", "(", "attribute", ")", ":", "return", "_distro", ".", "lsb_release_attr", "(", "attribute", ")" ]
[ 488, 0 ]
[ 505, 46 ]
python
en
['en', 'error', 'th']
False
distro_release_attr
(attribute)
Return a single named information item from the distro release file data source of the current OS distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. The empty string, if the item does not exist. See `distro release file`_ for details about these information items.
Return a single named information item from the distro release file data source of the current OS distribution.
def distro_release_attr(attribute):
    """
    Return a single named information item from the distro release file
    data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_attr(attribute)
[ "def", "distro_release_attr", "(", "attribute", ")", ":", "return", "_distro", ".", "distro_release_attr", "(", "attribute", ")" ]
[ 508, 0 ]
[ 524, 49 ]
python
en
['en', 'error', 'th']
False
uname_attr
(attribute)
Return a single named information item from the uname command data source of the current OS distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. The empty string, if the item does not exist.
Return a single named information item from the uname command data source of the current OS distribution.
def uname_attr(attribute):
    """
    Return a single named information item from the uname command data
    source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.
    """
    return _distro.uname_attr(attribute)
[ "def", "uname_attr", "(", "attribute", ")", ":", "return", "_distro", ".", "uname_attr", "(", "attribute", ")" ]
[ 527, 0 ]
[ 541, 40 ]
python
en
['en', 'error', 'th']
False
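A sketch for the four single-item accessors documented in the rows above, assuming distro is importable; the attribute keys used here ("id", "distributor_id", "name", "release") are examples that may or may not exist on a given host, and each call returns the empty string when the item is missing.

import distro

# Each accessor returns "" when the underlying source or key is unavailable.
print(distro.os_release_attr("id"))               # key from the os-release file (assumed example)
print(distro.lsb_release_attr("distributor_id"))  # key from lsb_release output (assumed example)
print(distro.distro_release_attr("name"))         # key from the distro release file (assumed example)
print(distro.uname_attr("release"))               # key from uname data (assumed example)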