text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _disable(self): """ The configuration containing this function has been disabled by host. Endpoint do not work anymore, so cancel AIO operation blocks. """
# Tear down only once: the host disabled the configuration containing this
# function, so the endpoints no longer work and in-flight AIO blocks must
# be cancelled.
if self._enabled:
    self._real_onCannotSend()
    has_cancelled = 0
    # Cancel every pending receive and send block; a block may already have
    # completed, in which case io_cancel raises OSError (logged, not fatal).
    for block in self._aio_recv_block_list + self._aio_send_block_list:
        try:
            self._aio_context.cancel(block)
        except OSError as exc:
            trace(
                'cancelling %r raised: %s' % (block, exc),
            )
        else:
            has_cancelled += 1
    # Reap the completion events for whatever was actually cancelled,
    # retrying if interrupted by a signal (noIntr handles EINTR).
    if has_cancelled:
        noIntr(functools.partial(self._aio_context.getEvents, min_nr=None))
    self._enabled = False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onAIOCompletion(self): """ Call when eventfd notified events are available. """
def onAIOCompletion(self):
    """Handle an eventfd notification that AIO events may be available."""
    # Drain the eventfd counter so the notification is acknowledged.
    pending = self.eventfd.read()
    trace('eventfd reports %i events' % pending)
    # The eventfd counter is advisory only: some events may already have
    # been processed (possibly during an io_cancel call), so do not trust
    # it — ask the AIO context for whatever is actually available, which
    # may be nothing at all.
    self._aio_context.getEvents(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _iter_errors_custom(instance, checks, options): """Perform additional validation not possible merely with JSON schemas. Args: instance: The STIX object to be validated. checks: A sequence of callables which do the checks. Each callable may be written to accept 1 arg, which is the object to check, or 2 args, which are the object and a ValidationOptions instance. options: ValidationOptions instance with settings affecting how validation should be done. """
def _iter_errors_custom(instance, checks, options):
    """Yield errors from validation not expressible as JSON schemas.

    Args:
        instance: The STIX object to be validated.
        checks: A sequence of callables which do the checks. Each callable
            may accept 1 arg (the object) or 2 args (object and a
            ValidationOptions instance).
        options: ValidationOptions instance with settings affecting how
            validation should be done.
    """
    # Run every check against this object, adapting to the check's arity.
    for check in checks:
        try:
            found = check(instance)
        except TypeError:
            found = check(instance, options)
        if isinstance(found, Iterable):
            for err in found:
                yield err
        elif found is not None:
            yield found

    # Recurse into embedded STIX objects held in list-valued properties.
    for prop in instance:
        if type(instance[prop]) is list:
            for child in instance[prop]:
                if _is_stix_obj(child):
                    for err in _iter_errors_custom(child, checks, options):
                        yield err
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_json_files(directory, recursive=False): """Return a list of file paths for JSON files within `directory`. Args: directory: A path to a directory. recursive: If ``True``, this function will descend into all subdirectories. Returns: A list of JSON file paths directly under `directory`. """
def list_json_files(directory, recursive=False):
    """Return a list of file paths for JSON files within `directory`.

    Args:
        directory: A path to a directory.
        recursive: If ``True``, this function will descend into all
            subdirectories.

    Returns:
        A list of JSON file paths directly under `directory`.
    """
    found = []
    for root, subdirs, filenames in os.walk(directory):
        subdirs.sort()  # deterministic traversal order
        # Collect the paths in this directory, sorted by file name.
        candidates = (os.path.join(root, name) for name in sorted(filenames))
        found.extend(path for path in candidates if is_json(path))
        if not recursive:
            break  # only the top-level directory was requested
    return found
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_json_files(files, recursive=False): """Return a list of files to validate from `files`. If a member of `files` is a directory, its children with a ``.json`` extension will be added to the return value. Args: files: A list of file paths and/or directory paths. recursive: If ``true``, this will descend into any subdirectories of input directories. Returns: A list of file paths to validate. """
def get_json_files(files, recursive=False):
    """Return a list of files to validate from `files`.

    If a member of `files` is a directory, its children with a ``.json``
    extension will be added to the return value.

    Args:
        files: A list of file paths and/or directory paths.
        recursive: If ``true``, this will descend into any subdirectories
            of input directories.

    Returns:
        A list of file paths to validate.
    """
    collected = []
    if not files:
        return collected

    for path in files:
        if os.path.isdir(path):
            collected.extend(list_json_files(path, recursive))
        elif is_json(path):
            collected.append(path)
        # anything else (non-JSON file) is silently skipped

    if not collected:
        raise NoJSONFileFoundError("No JSON files found!")
    return collected
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_validation(options): """Validate files based on command line options. Args: options: An instance of ``ValidationOptions`` containing options for this validation run. """
def run_validation(options):
    """Validate files based on command line options.

    Args:
        options: An instance of ``ValidationOptions`` containing options
            for this validation run.
    """
    # Reading from stdin: validate the stream as a single pseudo-file.
    if options.files == sys.stdin:
        stdin_results = validate(options.files, options)
        return [FileValidationResults(is_valid=stdin_results.is_valid,
                                      filepath='stdin',
                                      object_results=stdin_results)]

    # Otherwise expand the inputs to concrete JSON files and validate each.
    return [validate_file(path, options)
            for path in get_json_files(options.files, options.recursive)]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_parsed_json(obj_json, options=None): """ Validate objects from parsed JSON. This supports a single object, or a list of objects. If a single object is given, a single result is returned. Otherwise, a list of results is returned. If an error occurs, a ValidationErrorResults instance or list which includes one of these instances, is returned. :param obj_json: The parsed json :param options: Validation options :return: An ObjectValidationResults instance, or a list of such. """
def validate_parsed_json(obj_json, options=None):
    """Validate objects from parsed JSON. This supports a single object, or
    a list of objects. If a single object is given, a single result is
    returned. Otherwise, a list of results is returned.

    If an error occurs, a ValidationErrorResults instance or list which
    includes one of these instances, is returned.

    :param obj_json: The parsed json
    :param options: Validation options
    :return: An ObjectValidationResults instance, or a list of such.
    """
    def _validate_one(obj):
        # Wrap schema failures in an invalid result instead of raising, so
        # one bad object cannot abort validation of the others. (This was
        # previously duplicated in both the list and single-object paths.)
        try:
            return validate_instance(obj, options)
        except SchemaInvalidError as ex:
            return ObjectValidationResults(is_valid=False,
                                           object_id=obj.get('id', ''),
                                           errors=[str(ex)])

    validating_list = isinstance(obj_json, list)

    if not options:
        options = ValidationOptions()
    if not options.no_cache:
        init_requests_cache(options.refresh_cache)

    if validating_list:
        results = [_validate_one(obj) for obj in obj_json]
    else:
        results = _validate_one(obj_json)

    if not options.no_cache and options.clear_cache:
        clear_requests_cache()

    return results
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate(in_, options=None): """ Validate objects from JSON data in a textual stream. :param in_: A textual stream of JSON data. :param options: Validation options :return: An ObjectValidationResults instance, or a list of such. """
def validate(in_, options=None):
    """Validate objects from JSON data in a textual stream.

    :param in_: A textual stream of JSON data.
    :param options: Validation options
    :return: An ObjectValidationResults instance, or a list of such.
    """
    parsed = json.load(in_)
    return validate_parsed_json(parsed, options)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_file(fn, options=None): """Validate the input document `fn` according to the options passed in. If any exceptions are raised during validation, no further validation will take place. Args: fn: The filename of the JSON file to be validated. options: An instance of ``ValidationOptions``. Returns: An instance of FileValidationResults. """
def validate_file(fn, options=None):
    """Validate the input document `fn` according to the options passed in.

    If any exceptions are raised during validation, no further validation
    will take place.

    Args:
        fn: The filename of the JSON file to be validated.
        options: An instance of ``ValidationOptions``.

    Returns:
        An instance of FileValidationResults.
    """
    results = FileValidationResults(filepath=fn)
    output.info("Performing JSON schema validation on %s" % fn)

    if not options:
        options = ValidationOptions(files=fn)

    try:
        with open(fn) as instance_file:
            results.object_results = validate(instance_file, options)
    except Exception as ex:
        if 'Expecting value' in str(ex):
            # JSON parse error: extract the line number from the message.
            line_no = str(ex).split()[3]
            results.fatal = ValidationErrorResults(
                'Invalid JSON input on line %s' % line_no
            )
        else:
            results.fatal = ValidationErrorResults(ex)

        msg = ("Unexpected error occurred with file '{fn}'. No further "
               "validation will be performed: {error}")
        output.info(msg.format(fn=fn, error=str(ex)))

    # Valid only if every object validated cleanly and nothing fatal happened.
    results.is_valid = (all(obj_result.is_valid
                            for obj_result in results.object_results)
                        and not results.fatal)

    return results
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_string(string, options=None): """Validate the input `string` according to the options passed in. If any exceptions are raised during validation, no further validation will take place. Args: string: The string containing the JSON to be validated. options: An instance of ``ValidationOptions``. Returns: An ObjectValidationResults instance, or a list of such. """
def validate_string(string, options=None):
    """Validate the input `string` according to the options passed in.

    If any exceptions are raised during validation, no further validation
    will take place.

    Args:
        string: The string containing the JSON to be validated.
        options: An instance of ``ValidationOptions``.

    Returns:
        An ObjectValidationResults instance, or a list of such.
    """
    output.info("Performing JSON schema validation on input string: " + string)
    return validate(io.StringIO(string), options)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_validator(schema_path, schema): """Create a JSON schema validator for the given schema. Args: schema_path: The filename of the JSON schema. schema: A Python object representation of the same schema. Returns: An instance of Draft4Validator. """
def load_validator(schema_path, schema):
    """Create a JSON schema validator for the given schema.

    Args:
        schema_path: The filename of the JSON schema.
        schema: A Python object representation of the same schema.

    Returns:
        An instance of Draft4Validator.
    """
    # Windows needs the three-slash file URI form; POSIX uses 'file:'.
    file_prefix = 'file:///' if os.name == 'nt' else 'file:'
    resolver = RefResolver(file_prefix + schema_path.replace("\\", "/"),
                           schema)
    return Draft4Validator(schema, resolver=resolver)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_schema(schema_dir, obj_type): """Search the `schema_dir` directory for a schema called `obj_type`.json. Return the file path of the first match it finds. """
def find_schema(schema_dir, obj_type):
    """Search the `schema_dir` directory for a schema called `obj_type`.json.

    Return the file path of the first match it finds, or None if the
    schema does not exist anywhere under `schema_dir`.
    """
    target = obj_type + '.json'
    for parent, _dirs, names in os.walk(schema_dir):
        if target in names:
            return os.path.join(parent, target)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_schema(schema_path): """Load the JSON schema at the given path as a Python object. Args: schema_path: A filename for a JSON schema. Returns: A Python object representation of the schema. """
def load_schema(schema_path):
    """Load the JSON schema at the given path as a Python object.

    Args:
        schema_path: A filename for a JSON schema.

    Returns:
        A Python object representation of the schema.

    Raises:
        SchemaInvalidError: If the file does not contain valid JSON.
    """
    try:
        with open(schema_path) as handle:
            loaded = json.load(handle)
    except ValueError as e:
        # json.load raises ValueError (JSONDecodeError) on malformed input.
        raise SchemaInvalidError('Invalid JSON in schema or included schema: '
                                 '%s\n%s' % (handle.name, str(e)))
    return loaded
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_error_generator(type, obj, schema_dir=None, version=DEFAULT_VER, default='core'): """Get a generator for validating against the schema for the given object type. Args: type (str): The object type to find the schema for. obj: The object to be validated. schema_dir (str): The path in which to search for schemas. version (str): The version of the STIX specification to validate against. Only used to find base schemas when schema_dir is None. default (str): If the schema for the given type cannot be found, use the one with this name instead. Returns: A generator for errors found when validating the object against the appropriate schema, or None if schema_dir is None and the schema cannot be found. """
def _get_error_generator(type, obj, schema_dir=None, version=DEFAULT_VER,
                         default='core'):
    """Get a generator for validating against the schema for the given
    object type.

    Args:
        type (str): The object type to find the schema for.
        obj: The object to be validated.
        schema_dir (str): The path in which to search for schemas.
        version (str): The version of the STIX specification to validate
            against. Only used to find base schemas when schema_dir is None.
        default (str): If the schema for the given type cannot be found, use
            the one with this name instead.

    Returns:
        A generator for errors found when validating the object against the
        appropriate schema, or None if schema_dir is given and the schema
        cannot be found.

    Raises:
        SchemaInvalidError: If neither the type's schema nor the default
            schema can be located in the bundled schema directory, or if a
            JSON reference in the schema fails to resolve.
    """
    # Remember whether the caller supplied a custom schema directory.
    # `schema_dir` is reassigned below, so testing `schema_dir is not None`
    # afterwards (as the previous code did) always succeeded, which made
    # the SchemaInvalidError branch and the observed-data schema tweak
    # unreachable.
    custom_schema_dir = schema_dir is not None

    # If no schema directory given, use default for the given STIX version,
    # which comes bundled with this package.
    if schema_dir is None:
        schema_dir = os.path.abspath(os.path.dirname(__file__) + '/schemas-' +
                                     version + '/')

    try:
        schema_path = find_schema(schema_dir, type)
        schema = load_schema(schema_path)
    except (KeyError, TypeError):
        # Assume a custom object with no schema
        try:
            schema_path = find_schema(schema_dir, default)
            schema = load_schema(schema_path)
        except (KeyError, TypeError):
            # Only raise an error when checking against default schemas,
            # not custom ones.
            if custom_schema_dir:
                return None
            raise SchemaInvalidError("Cannot locate a schema for the object's "
                                     "type, nor the base schema ({}.json)."
                                     .format(default))

    if type == 'observed-data' and not custom_schema_dir:
        # Validate against schemas for specific observed data object types
        # later. A custom schema is used as-is and won't be modified.
        schema['allOf'][1]['properties']['objects'] = {
            "objects": {
                "type": "object",
                "minProperties": 1
            }
        }

    # Don't use custom validator; only check schemas, no additional checks
    validator = load_validator(schema_path, schema)
    try:
        error_gen = validator.iter_errors(obj)
    except schema_exceptions.RefResolutionError:
        raise SchemaInvalidError('Invalid JSON schema: a JSON '
                                 'reference failed to resolve')
    return error_gen
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_musts(options): """Return the list of 'MUST' validators for the correct version of STIX. Args: options: ValidationOptions instance with validation options for this validation run, including the STIX spec version. """
def _get_musts(options):
    """Return the list of 'MUST' validators for the correct version of STIX.

    Args:
        options: ValidationOptions instance with validation options for
            this validation run, including the STIX spec version.
    """
    # STIX 2.0 has its own check list; everything else uses the 2.1 set.
    return (musts20.list_musts(options) if options.version == '2.0'
            else musts21.list_musts(options))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_shoulds(options): """Return the list of 'SHOULD' validators for the correct version of STIX. Args: options: ValidationOptions instance with validation options for this validation run, including the STIX spec version. """
def _get_shoulds(options):
    """Return the list of 'SHOULD' validators for the correct version of STIX.

    Args:
        options: ValidationOptions instance with validation options for
            this validation run, including the STIX spec version.
    """
    # STIX 2.0 has its own check list; everything else uses the 2.1 set.
    return (shoulds20.list_shoulds(options) if options.version == '2.0'
            else shoulds21.list_shoulds(options))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _schema_validate(sdo, options): """Set up validation of a single STIX object against its type's schema. This does no actual validation; it just returns generators which must be iterated to trigger the actual generation. This function first creates generators for the built-in schemas, then adds generators for additional schemas from the options, if specified. Do not call this function directly; use validate_instance() instead, as it calls this one. This function does not perform any custom checks. """
# Accumulates (error_generator, message_prefix) pairs; nothing is iterated
# here — validate_instance() triggers the actual validation later.
error_gens = []

# Build a prefix identifying this object in error messages.
if 'id' in sdo:
    try:
        error_prefix = sdo['id'] + ": "
    except TypeError:
        # 'id' was not a string; fall back to a generic label.
        error_prefix = 'unidentifiable object: '
else:
    error_prefix = ''

# Get validator for built-in schema
base_sdo_errors = _get_error_generator(sdo['type'], sdo,
                                       version=options.version)
if base_sdo_errors:
    error_gens.append((base_sdo_errors, error_prefix))

# Get validator for any user-supplied schema
if options.schema_dir:
    custom_sdo_errors = _get_error_generator(sdo['type'], sdo,
                                             options.schema_dir)
    if custom_sdo_errors:
        error_gens.append((custom_sdo_errors, error_prefix))

# Validate each cyber observable object separately
if sdo['type'] == 'observed-data' and 'objects' in sdo:
    # Check if observed data property is in dictionary format
    if not isinstance(sdo['objects'], dict):
        error_gens.append(([schema_exceptions.ValidationError(
            "Observed Data objects must be in dict format.", error_prefix)],
            error_prefix))
        return error_gens

    for key, obj in iteritems(sdo['objects']):
        if 'type' not in obj:
            # Cannot pick a schema without a type; report and move on.
            error_gens.append(([schema_exceptions.ValidationError(
                "Observable object must contain a 'type' property.",
                error_prefix)],
                error_prefix + 'object \'' + key + '\': '))
            continue
        # Get validator for built-in schemas
        base_obs_errors = _get_error_generator(obj['type'], obj, None,
                                               options.version,
                                               'cyber-observable-core')
        if base_obs_errors:
            error_gens.append((base_obs_errors,
                               error_prefix + 'object \'' + key + '\': '))
        # Get validator for any user-supplied schema
        custom_obs_errors = _get_error_generator(obj['type'], obj,
                                                 options.schema_dir,
                                                 options.version,
                                                 'cyber-observable-core')
        if custom_obs_errors:
            error_gens.append((custom_obs_errors,
                               error_prefix + 'object \'' + key + '\': '))

return error_gens
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_instance(instance, options=None): """Perform STIX JSON Schema validation against STIX input. Find the correct schema by looking at the 'type' property of the `instance` JSON object. Args: instance: A Python dictionary representing a STIX object with a 'type' property. options: ValidationOptions instance with validation options for this validation run. Returns: A dictionary of validation results """
if 'type' not in instance:
    raise ValidationError("Input must be an object with a 'type' property.")

if not options:
    options = ValidationOptions()

error_gens = []

# Schema validation
if instance['type'] == 'bundle' and 'objects' in instance:
    # Validate each object in a bundle separately
    for sdo in instance['objects']:
        if 'type' not in sdo:
            raise ValidationError("Each object in bundle must have a 'type' property.")
        error_gens += _schema_validate(sdo, options)
else:
    error_gens += _schema_validate(instance, options)

# Custom validation
must_checks = _get_musts(options)
should_checks = _get_shoulds(options)
output.info("Running the following additional checks: %s."
            % ", ".join(x.__name__ for x in chain(must_checks,
                                                  should_checks)))
try:
    # Both are lazy generators at this point; nothing has run yet.
    errors = _iter_errors_custom(instance, must_checks, options)
    warnings = _iter_errors_custom(instance, should_checks, options)

    if options.strict:
        # Strict mode promotes SHOULD warnings to errors.
        chained_errors = chain(errors, warnings)
        warnings = []
    else:
        chained_errors = errors
        # Consuming the generator here actually runs the SHOULD checks.
        warnings = [pretty_error(x, options.verbose) for x in warnings]
except schema_exceptions.RefResolutionError:
    raise SchemaInvalidError('Invalid JSON schema: a JSON reference '
                             'failed to resolve')

# List of error generators and message prefixes (to denote which object the
# error comes from)
error_gens += [(chained_errors, '')]

# Prepare the list of errors (this actually triggers the custom validation
# functions).
error_list = []
for gen, prefix in error_gens:
    for error in gen:
        msg = prefix + pretty_error(error, options.verbose)
        error_list.append(SchemaError(msg))

if error_list:
    valid = False
else:
    valid = True

return ObjectValidationResults(is_valid=valid,
                               object_id=instance.get('id', ''),
                               errors=error_list, warnings=warnings)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_prefix_strict(instance): """Ensure custom content follows strict naming style conventions. """
def custom_prefix_strict(instance):
    """Ensure custom content follows strict naming style conventions.
    """
    # Run every strict prefix check and re-yield whatever each produces.
    checks = (custom_object_prefix_strict(instance),
              custom_property_prefix_strict(instance),
              custom_observable_object_prefix_strict(instance),
              custom_object_extension_prefix_strict(instance),
              custom_observable_properties_prefix_strict(instance))
    for check in checks:
        for error in check:
            yield error
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_prefix_lax(instance): """Ensure custom content follows lenient naming style conventions for forward-compatibility. """
def custom_prefix_lax(instance):
    """Ensure custom content follows lenient naming style conventions for
    forward-compatibility.
    """
    # Run every lax prefix check and re-yield whatever each produces.
    checks = (custom_object_prefix_lax(instance),
              custom_property_prefix_lax(instance),
              custom_observable_object_prefix_lax(instance),
              custom_object_extension_prefix_lax(instance),
              custom_observable_properties_prefix_lax(instance))
    for check in checks:
        for error in check:
            yield error
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_object_prefix_strict(instance): """Ensure custom objects follow strict naming style conventions. """
def custom_object_prefix_strict(instance):
    """Ensure custom objects follow strict naming style conventions.
    """
    obj_type = instance['type']
    # Known and reserved types are exempt; anything else is custom.
    is_custom = (obj_type not in enums.TYPES and
                 obj_type not in enums.RESERVED_OBJECTS)
    if is_custom and not CUSTOM_TYPE_PREFIX_RE.match(obj_type):
        yield JSONError("Custom object type '%s' should start with 'x-' "
                        "followed by a source unique identifier (like a "
                        "domain name with dots replaced by hyphens), a hyphen "
                        "and then the name." % obj_type,
                        instance['id'], 'custom-prefix')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_object_prefix_lax(instance): """Ensure custom objects follow lenient naming style conventions for forward-compatibility. """
def custom_object_prefix_lax(instance):
    """Ensure custom objects follow lenient naming style conventions for
    forward-compatibility.
    """
    obj_type = instance['type']
    # Known and reserved types are exempt from the prefix requirement.
    if obj_type in enums.TYPES or obj_type in enums.RESERVED_OBJECTS:
        return
    if not CUSTOM_TYPE_LAX_PREFIX_RE.match(obj_type):
        yield JSONError("Custom object type '%s' should start with 'x-' in "
                        "order to be compatible with future versions of the "
                        "STIX 2 specification." % obj_type,
                        instance['id'], 'custom-prefix-lax')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_property_prefix_strict(instance): """Ensure custom properties follow strict naming style conventions. Does not check property names in custom objects. """
def custom_property_prefix_strict(instance):
    """Ensure custom properties follow strict naming style conventions.

    Does not check property names in custom objects.
    """
    # Only objects whose type has a known property list are checked.
    if instance['type'] not in enums.PROPERTIES:
        return
    recognized = enums.PROPERTIES[instance['type']]
    for prop_name in instance.keys():
        if (prop_name not in recognized and
                prop_name not in enums.RESERVED_PROPERTIES and
                not CUSTOM_PROPERTY_PREFIX_RE.match(prop_name)):
            yield JSONError("Custom property '%s' should have a type that "
                            "starts with 'x_' followed by a source unique "
                            "identifier (like a domain name with dots "
                            "replaced by hyphen), a hyphen and then the name."
                            % prop_name, instance['id'],
                            'custom-prefix')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_property_prefix_lax(instance): """Ensure custom properties follow lenient naming style conventions for forward-compatibility. Does not check property names in custom objects. """
def custom_property_prefix_lax(instance):
    """Ensure custom properties follow lenient naming style conventions for
    forward-compatibility.

    Does not check property names in custom objects.
    """
    # Only objects whose type has a known property list are checked.
    if instance['type'] not in enums.PROPERTIES:
        return
    recognized = enums.PROPERTIES[instance['type']]
    for prop_name in instance.keys():
        if (prop_name not in recognized and
                prop_name not in enums.RESERVED_PROPERTIES and
                not CUSTOM_PROPERTY_LAX_PREFIX_RE.match(prop_name)):
            yield JSONError("Custom property '%s' should have a type that "
                            "starts with 'x_' in order to be compatible with "
                            "future versions of the STIX 2 specification."
                            % prop_name, instance['id'],
                            'custom-prefix-lax')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def open_vocab_values(instance): """Ensure that the values of all properties which use open vocabularies are in lowercase and use hyphens instead of spaces or underscores as word separators. """
def open_vocab_values(instance):
    """Ensure that the values of all properties which use open vocabularies
    are in lowercase and use hyphens instead of spaces or underscores as
    word separators.
    """
    if instance['type'] not in enums.VOCAB_PROPERTIES:
        return

    for prop in enums.VOCAB_PROPERTIES[instance['type']]:
        if prop not in instance:
            continue
        raw = instance[prop]
        # A property may hold a single value or a list of values.
        values = raw if type(raw) is list else [raw]
        for v in values:
            if not v.islower() or '_' in v or ' ' in v:
                yield JSONError("Open vocabulary value '%s' should be all"
                                " lowercase and use hyphens instead of"
                                " spaces or underscores as word"
                                " separators." % v, instance['id'],
                                'open-vocab-format')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kill_chain_phase_names(instance): """Ensure the `kill_chain_name` and `phase_name` properties of `kill_chain_phase` objects follow naming style conventions. """
def kill_chain_phase_names(instance):
    """Ensure the `kill_chain_name` and `phase_name` properties of
    `kill_chain_phase` objects follow naming style conventions.
    """
    def _badly_formatted(name):
        # Names should be lowercase with hyphens as word separators.
        return not name.islower() or '_' in name or ' ' in name

    if (instance['type'] not in enums.KILL_CHAIN_PHASE_USES or
            'kill_chain_phases' not in instance):
        return

    for phase in instance['kill_chain_phases']:
        if 'kill_chain_name' not in phase:
            # Since this field is required, schemas will already catch
            # the error.
            return

        chain_name = phase['kill_chain_name']
        if _badly_formatted(chain_name):
            yield JSONError("kill_chain_name '%s' should be all lowercase"
                            " and use hyphens instead of spaces or "
                            "underscores as word separators." % chain_name,
                            instance['id'], 'kill-chain-names')

        phase_name = phase['phase_name']
        if _badly_formatted(phase_name):
            yield JSONError("phase_name '%s' should be all lowercase and "
                            "use hyphens instead of spaces or underscores "
                            "as word separators." % phase_name,
                            instance['id'], 'kill-chain-names')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_vocab(instance, vocab, code): """Ensure that the open vocabulary specified by `vocab` is used properly. This checks properties of objects specified in the appropriate `_USES` dictionary to determine which properties SHOULD use the given vocabulary, then checks that the values in those properties are from the vocabulary. """
def check_vocab(instance, vocab, code):
    """Ensure that the open vocabulary specified by `vocab` is used properly.

    This checks properties of objects specified in the appropriate `_USES`
    dictionary to determine which properties SHOULD use the given
    vocabulary, then checks that the values in those properties are from
    the vocabulary.

    Args:
        instance: The STIX object being checked.
        vocab: Base name of the vocabulary's `enums` attributes
            (e.g. ``"HASH_ALGO"`` pairs with ``HASH_ALGO_USES``/``_OV``).
        code: Error code attached to any JSONError yielded.
    """
    vocab_uses = getattr(enums, vocab + "_USES")
    # Direct dict lookup replaces the previous linear scan over every key
    # looking for an equality match with instance['type'].
    props = vocab_uses.get(instance['type'])
    if not props:
        return

    # Hoist loop invariants: the vocabulary value list and display name
    # were previously recomputed for every property.
    vocab_ov = getattr(enums, vocab + "_OV")
    vocab_name = vocab.replace('_', '-').lower()

    for prop in props:
        if prop not in instance:
            continue
        value = instance[prop]
        if type(value) is list:
            is_in = set(value).issubset(set(vocab_ov))
        else:
            is_in = value in vocab_ov
        if not is_in:
            yield JSONError("%s contains a value not in the %s-ov "
                            "vocabulary." % (prop, vocab_name),
                            instance['id'], code)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def vocab_marking_definition(instance): """Ensure that the `definition_type` property of `marking-definition` objects is one of the values in the STIX 2.0 specification. """
def vocab_marking_definition(instance):
    """Ensure that the `definition_type` property of `marking-definition`
    objects is one of the values in the STIX 2.0 specification.
    """
    if instance['type'] != 'marking-definition':
        return
    if 'definition_type' not in instance:
        return
    if instance['definition_type'] not in enums.MARKING_DEFINITION_TYPES:
        return JSONError("Marking definition `definition_type` should be one "
                         "of: %s." % ', '.join(enums.MARKING_DEFINITION_TYPES),
                         instance['id'], 'marking-definition-type')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def relationships_strict(instance): """Ensure that only the relationship types defined in the specification are used. """
# Don't check objects that aren't relationships or that are custom objects if (instance['type'] != 'relationship' or instance['type'] not in enums.TYPES): return if ('relationship_type' not in instance or 'source_ref' not in instance or 'target_ref' not in instance): # Since these fields are required, schemas will already catch the error return r_type = instance['relationship_type'] try: r_source = re.search(r"(.+)\-\-", instance['source_ref']).group(1) r_target = re.search(r"(.+)\-\-", instance['target_ref']).group(1) except (AttributeError, TypeError): # Schemas already catch errors of these properties not being strings or # not containing the string '--'. return if (r_type in enums.COMMON_RELATIONSHIPS or r_source in enums.NON_SDOS or r_target in enums.NON_SDOS): # If all objects can have this relationship type, no more checks needed # Schemas already catch if source/target type cannot have relationship return if r_source not in enums.RELATIONSHIPS: return JSONError("'%s' is not a suggested relationship source object " "for the '%s' relationship." % (r_source, r_type), instance['id'], 'relationship-types') if r_type not in enums.RELATIONSHIPS[r_source]: return JSONError("'%s' is not a suggested relationship type for '%s' " "objects." % (r_type, r_source), instance['id'], 'relationship-types') if r_target not in enums.RELATIONSHIPS[r_source][r_type]: return JSONError("'%s' is not a suggested relationship target object " "for '%s' objects with the '%s' relationship." % (r_target, r_source, r_type), instance['id'], 'relationship-types')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def valid_hash_value(hashname): """Return true if given value is a valid, recommended hash name according to the STIX 2 specification. """
def valid_hash_value(hashname):
    """Return True if the given value is a valid, recommended hash name
    according to the STIX 2 specification, or a custom ``x_``-prefixed name.
    """
    # str.startswith replaces recompiling a '^x_' regex on every call, and
    # returning the expression replaces the verbose if/else True/False.
    return hashname in enums.HASH_ALGO_OV or hashname.startswith("x_")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def vocab_windows_pebinary_type(instance): """Ensure file objects with the windows-pebinary-ext extension have a 'pe-type' property that is from the windows-pebinary-type-ov vocabulary. """
def vocab_windows_pebinary_type(instance):
    """Ensure file objects with the windows-pebinary-ext extension have a
    'pe-type' property that is from the windows-pebinary-type-ov vocabulary.
    """
    for key, obj in instance['objects'].items():
        if obj.get('type') != 'file':
            continue
        try:
            pe_type = obj['extensions']['windows-pebinary-ext']['pe_type']
        except KeyError:
            # Not a PE binary, or no pe_type set; nothing to check.
            continue
        if pe_type not in enums.WINDOWS_PEBINARY_TYPE_OV:
            yield JSONError("Object '%s' has a Windows PE Binary File "
                            "extension with a 'pe_type' of '%s', which is not a "
                            "value in the windows-pebinary-type-ov vocabulary."
                            % (key, pe_type), instance['id'],
                            'windows-pebinary-type')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def vocab_account_type(instance): """Ensure a user-account objects' 'account-type' property is from the account-type-ov vocabulary. """
def vocab_account_type(instance):
    """Ensure a user-account object's 'account_type' property is from the
    account-type-ov vocabulary.
    """
    for key, obj in instance['objects'].items():
        if obj.get('type') != 'user-account':
            continue
        try:
            acct_type = obj['account_type']
        except KeyError:
            # Optional property; nothing to check.
            continue
        if acct_type not in enums.ACCOUNT_TYPE_OV:
            yield JSONError("Object '%s' is a User Account Object "
                            "with an 'account_type' of '%s', which is not a "
                            "value in the account-type-ov vocabulary."
                            % (key, acct_type), instance['id'],
                            'account-type')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def observable_object_keys(instance): """Ensure observable-objects keys are non-negative integers. """
def observable_object_keys(instance):
    """Ensure observable-objects keys are non-negative integers.
    """
    key_pattern = re.compile(r"^\d+$")  # digits only, hoisted out of loop
    for key in instance['objects']:
        if key_pattern.match(key):
            continue
        yield JSONError("'%s' is not a good key value. Observable Objects "
                        "should use non-negative integers for their keys."
                        % key, instance['id'],
                        'observable-object-keys')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_observable_object_prefix_strict(instance): """Ensure custom observable objects follow strict naming style conventions. """
def custom_observable_object_prefix_strict(instance):
    """Ensure custom observable objects follow strict naming style
    conventions.
    """
    for key, obj in instance['objects'].items():
        if 'type' not in obj:
            continue
        obj_type = obj['type']
        # Known and reserved observable types are exempt.
        if (obj_type in enums.OBSERVABLE_TYPES or
                obj_type in enums.OBSERVABLE_RESERVED_OBJECTS):
            continue
        if not CUSTOM_TYPE_PREFIX_RE.match(obj_type):
            yield JSONError("Custom Observable Object type '%s' should start "
                            "with 'x-' followed by a source unique identifier "
                            "(like a domain name with dots replaced by "
                            "hyphens), a hyphen and then the name."
                            % obj_type, instance['id'],
                            'custom-prefix')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_observable_object_prefix_lax(instance): """Ensure custom observable objects follow naming style conventions. """
def custom_observable_object_prefix_lax(instance):
    """Ensure custom observable objects follow naming style conventions.
    """
    for key, obj in instance['objects'].items():
        if 'type' not in obj:
            continue
        obj_type = obj['type']
        # Known and reserved observable types are exempt.
        if (obj_type in enums.OBSERVABLE_TYPES or
                obj_type in enums.OBSERVABLE_RESERVED_OBJECTS):
            continue
        if not CUSTOM_TYPE_LAX_PREFIX_RE.match(obj_type):
            yield JSONError("Custom Observable Object type '%s' should start "
                            "with 'x-'." % obj_type, instance['id'],
                            'custom-prefix-lax')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_object_extension_prefix_strict(instance): """Ensure custom observable object extensions follow strict naming style conventions. """
def custom_object_extension_prefix_strict(instance):
    """Ensure custom observable object extensions follow strict naming
    style conventions.
    """
    for key, obj in instance['objects'].items():
        # Only objects with extensions and a known extensible type apply.
        eligible = ('extensions' in obj and 'type' in obj and
                    obj['type'] in enums.OBSERVABLE_EXTENSIONS)
        if not eligible:
            continue
        recognized = enums.OBSERVABLE_EXTENSIONS[obj['type']]
        for ext_key in obj['extensions']:
            if (ext_key not in recognized and
                    not CUSTOM_TYPE_PREFIX_RE.match(ext_key)):
                yield JSONError("Custom Cyber Observable Object extension type"
                                " '%s' should start with 'x-' followed by a source "
                                "unique identifier (like a domain name with dots "
                                "replaced by hyphens), a hyphen and then the name."
                                % ext_key, instance['id'],
                                'custom-prefix')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def custom_object_extension_prefix_lax(instance): """Ensure custom observable object extensions follow naming style conventions. """
for key, obj in instance['objects'].items(): if not ('extensions' in obj and 'type' in obj and obj['type'] in enums.OBSERVABLE_EXTENSIONS): continue for ext_key in obj['extensions']: if (ext_key not in enums.OBSERVABLE_EXTENSIONS[obj['type']] and not CUSTOM_TYPE_LAX_PREFIX_RE.match(ext_key)): yield JSONError("Custom Cyber Observable Object extension type" " '%s' should start with 'x-'." % ext_key, instance['id'], 'custom-prefix-lax')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def network_traffic_ports(instance): """Ensure network-traffic objects contain both src_port and dst_port. """
for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'network-traffic' and ('src_port' not in obj or 'dst_port' not in obj)): yield JSONError("The Network Traffic object '%s' should contain " "both the 'src_port' and 'dst_port' properties." % key, instance['id'], 'network-traffic-ports')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def mime_type(instance): """Ensure the 'mime_type' property of file objects comes from the Template column in the IANA media type registry. """
mime_pattern = re.compile(r'^(application|audio|font|image|message|model' '|multipart|text|video)/[a-zA-Z0-9.+_-]+') for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'file' and 'mime_type' in obj): if enums.media_types(): if obj['mime_type'] not in enums.media_types(): yield JSONError("The 'mime_type' property of object '%s' " "('%s') should be an IANA registered MIME " "Type of the form 'type/subtype'." % (key, obj['mime_type']), instance['id'], 'mime-type') else: info("Can't reach IANA website; using regex for mime types.") if not mime_pattern.match(obj['mime_type']): yield JSONError("The 'mime_type' property of object '%s' " "('%s') should be an IANA MIME Type of the" " form 'type/subtype'." % (key, obj['mime_type']), instance['id'], 'mime-type')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def protocols(instance): """Ensure the 'protocols' property of network-traffic objects contains only values from the IANA Service Name and Transport Protocol Port Number Registry. """
for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'network-traffic' and 'protocols' in obj): for prot in obj['protocols']: if enums.protocols(): if prot not in enums.protocols(): yield JSONError("The 'protocols' property of object " "'%s' contains a value ('%s') not in " "IANA Service Name and Transport " "Protocol Port Number Registry." % (key, prot), instance['id'], 'protocols') else: info("Can't reach IANA website; using regex for protocols.") if not PROTOCOL_RE.match(prot): yield JSONError("The 'protocols' property of object " "'%s' contains a value ('%s') not in " "IANA Service Name and Transport " "Protocol Port Number Registry." % (key, prot), instance['id'], 'protocols')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pdf_doc_info(instance): """Ensure the keys of the 'document_info_dict' property of the pdf-ext extension of file objects are only valid PDF Document Information Dictionary Keys. """
for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'file'): try: did = obj['extensions']['pdf-ext']['document_info_dict'] except KeyError: continue for elem in did: if elem not in enums.PDF_DID: yield JSONError("The 'document_info_dict' property of " "object '%s' contains a key ('%s') that is" " not a valid PDF Document Information " "Dictionary key." % (key, elem), instance['id'], 'pdf-doc-info')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def countries(instance): """Ensure that the `country` property of `location` objects is a valid ISO 3166-1 ALPHA-2 Code. """
if (instance['type'] == 'location' and 'country' in instance and not instance['country'].upper() in enums.COUNTRY_CODES): return JSONError("Location `country` should be a valid ISO 3166-1 " "ALPHA-2 Code.", instance['id'], 'marking-definition-type')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def windows_process_priority_format(instance): """Ensure the 'priority' property of windows-process-ext ends in '_CLASS'. """
class_suffix_re = re.compile(r'.+_CLASS$') for key, obj in instance['objects'].items(): if 'type' in obj and obj['type'] == 'process': try: priority = obj['extensions']['windows-process-ext']['priority'] except KeyError: continue if not class_suffix_re.match(priority): yield JSONError("The 'priority' property of object '%s' should" " end in '_CLASS'." % key, instance['id'], 'windows-process-priority-format')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def duplicate_ids(instance): """Ensure objects with duplicate IDs have different `modified` timestamps. """
if instance['type'] != 'bundle' or 'objects' not in instance: return unique_ids = {} for obj in instance['objects']: if 'id' not in obj or 'modified' not in obj: continue elif obj['id'] not in unique_ids: unique_ids[obj['id']] = obj['modified'] elif obj['modified'] == unique_ids[obj['id']]: yield JSONError("Duplicate ID '%s' has identical `modified` timestamp." " If they are different versions of the same object, " "they should have different `modified` properties." % obj['id'], instance['id'], 'duplicate-ids')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def timestamp(instance): """Ensure timestamps contain sane months, days, hours, minutes, seconds. """
ts_re = re.compile(r"^[0-9]{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?Z$") timestamp_props = ['created', 'modified'] if instance['type'] in enums.TIMESTAMP_PROPERTIES: timestamp_props += enums.TIMESTAMP_PROPERTIES[instance['type']] for tprop in timestamp_props: if tprop in instance and ts_re.match(instance[tprop]): # Don't raise an error if schemas will catch it try: parser.parse(instance[tprop]) except ValueError as e: yield JSONError("'%s': '%s' is not a valid timestamp: %s" % (tprop, instance[tprop], str(e)), instance['id']) if has_cyber_observable_data(instance): for key, obj in instance['objects'].items(): if 'type' not in obj: continue if obj['type'] in enums.TIMESTAMP_OBSERVABLE_PROPERTIES: for tprop in enums.TIMESTAMP_OBSERVABLE_PROPERTIES[obj['type']]: if tprop in obj and ts_re.match(obj[tprop]): # Don't raise an error if schemas will catch it try: parser.parse(obj[tprop]) except ValueError as e: yield JSONError("'%s': '%s': '%s' is not a valid timestamp: %s" % (obj['type'], tprop, obj[tprop], str(e)), instance['id']) if obj['type'] in enums.TIMESTAMP_EMBEDDED_PROPERTIES: for embed in enums.TIMESTAMP_EMBEDDED_PROPERTIES[obj['type']]: if embed in obj: for tprop in enums.TIMESTAMP_EMBEDDED_PROPERTIES[obj['type']][embed]: if embed == 'extensions': for ext in obj[embed]: if tprop in obj[embed][ext] and ts_re.match(obj[embed][ext][tprop]): try: parser.parse(obj[embed][ext][tprop]) except ValueError as e: yield JSONError("'%s': '%s': '%s': '%s' is not a valid timestamp: %s" % (obj['type'], ext, tprop, obj[embed][ext][tprop], str(e)), instance['id']) elif tprop in obj[embed] and ts_re.match(obj[embed][tprop]): try: parser.parse(obj[embed][tprop]) except ValueError as e: yield JSONError("'%s': '%s': '%s' is not a valid timestamp: %s" % (obj['type'], tprop, obj[embed][tprop], str(e)), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def modified_created(instance): """`modified` property must be later or equal to `created` property """
if 'modified' in instance and 'created' in instance and \ instance['modified'] < instance['created']: msg = "'modified' (%s) must be later or equal to 'created' (%s)" return JSONError(msg % (instance['modified'], instance['created']), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def marking_selector_syntax(instance): """Ensure selectors in granular markings refer to items which are actually present in the object. """
if 'granular_markings' not in instance: return list_index_re = re.compile(r"\[(\d+)\]") for marking in instance['granular_markings']: if 'selectors' not in marking: continue selectors = marking['selectors'] for selector in selectors: segments = selector.split('.') obj = instance prev_segmt = None for segmt in segments: index_match = list_index_re.match(segmt) if index_match: try: idx = int(index_match.group(1)) obj = obj[idx] except IndexError: yield JSONError("'%s' is not a valid selector because" " %s is not a valid index." % (selector, idx), instance['id']) except KeyError: yield JSONError("'%s' is not a valid selector because" " '%s' is not a list." % (selector, prev_segmt), instance['id']) else: try: obj = obj[segmt] except KeyError as e: yield JSONError("'%s' is not a valid selector because" " %s is not a property." % (selector, e), instance['id']) except TypeError: yield JSONError("'%s' is not a valid selector because" " '%s' is not a property." % (selector, segmt), instance['id']) prev_segmt = segmt
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def observable_object_references(instance): """Ensure certain observable object properties reference the correct type of object. """
for key, obj in instance['objects'].items(): if 'type' not in obj: continue elif obj['type'] not in enums.OBSERVABLE_PROP_REFS: continue obj_type = obj['type'] for obj_prop in enums.OBSERVABLE_PROP_REFS[obj_type]: if obj_prop not in obj: continue enum_prop = enums.OBSERVABLE_PROP_REFS[obj_type][obj_prop] if isinstance(enum_prop, list): refs = obj[obj_prop] enum_vals = enum_prop for x in check_observable_refs(refs, obj_prop, enum_prop, '', enum_vals, key, instance): yield x elif isinstance(enum_prop, dict): for embedded_prop in enum_prop: if isinstance(obj[obj_prop], dict): if embedded_prop not in obj[obj_prop]: continue embedded_obj = obj[obj_prop][embedded_prop] for embed_obj_prop in embedded_obj: if embed_obj_prop not in enum_prop[embedded_prop]: continue refs = embedded_obj[embed_obj_prop] enum_vals = enum_prop[embedded_prop][embed_obj_prop] for x in check_observable_refs(refs, obj_prop, enum_prop, embed_obj_prop, enum_vals, key, instance): yield x elif isinstance(obj[obj_prop], list): for embedded_list_obj in obj[obj_prop]: if embedded_prop not in embedded_list_obj: continue embedded_obj = embedded_list_obj[embedded_prop] refs = embedded_obj enum_vals = enum_prop[embedded_prop] for x in check_observable_refs(refs, obj_prop, enum_prop, embedded_prop, enum_vals, key, instance): yield x
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def artifact_mime_type(instance): """Ensure the 'mime_type' property of artifact objects comes from the Template column in the IANA media type registry. """
for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'artifact' and 'mime_type' in obj): if enums.media_types(): if obj['mime_type'] not in enums.media_types(): yield JSONError("The 'mime_type' property of object '%s' " "('%s') must be an IANA registered MIME " "Type of the form 'type/subtype'." % (key, obj['mime_type']), instance['id']) else: info("Can't reach IANA website; using regex for mime types.") mime_re = re.compile(r'^(application|audio|font|image|message|model' '|multipart|text|video)/[a-zA-Z0-9.+_-]+') if not mime_re.match(obj['mime_type']): yield JSONError("The 'mime_type' property of object '%s' " "('%s') should be an IANA MIME Type of the" " form 'type/subtype'." % (key, obj['mime_type']), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def character_set(instance): """Ensure certain properties of cyber observable objects come from the IANA Character Set list. """
char_re = re.compile(r'^[a-zA-Z0-9_\(\)-]+$') for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'directory' and 'path_enc' in obj): if enums.char_sets(): if obj['path_enc'] not in enums.char_sets(): yield JSONError("The 'path_enc' property of object '%s' " "('%s') must be an IANA registered " "character set." % (key, obj['path_enc']), instance['id']) else: info("Can't reach IANA website; using regex for character_set.") if not char_re.match(obj['path_enc']): yield JSONError("The 'path_enc' property of object '%s' " "('%s') must be an IANA registered " "character set." % (key, obj['path_enc']), instance['id']) if ('type' in obj and obj['type'] == 'file' and 'name_enc' in obj): if enums.char_sets(): if obj['name_enc'] not in enums.char_sets(): yield JSONError("The 'name_enc' property of object '%s' " "('%s') must be an IANA registered " "character set." % (key, obj['name_enc']), instance['id']) else: info("Can't reach IANA website; using regex for character_set.") if not char_re.match(obj['name_enc']): yield JSONError("The 'name_enc' property of object '%s' " "('%s') must be an IANA registered " "character set." % (key, obj['name_enc']), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def software_language(instance): """Ensure the 'language' property of software objects is a valid ISO 639-2 language code. """
for key, obj in instance['objects'].items(): if ('type' in obj and obj['type'] == 'software' and 'languages' in obj): for lang in obj['languages']: if lang not in enums.SOFTWARE_LANG_CODES: yield JSONError("The 'languages' property of object '%s' " "contains an invalid ISO 639-2 language " " code ('%s')." % (key, lang), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def types_strict(instance): """Ensure that no custom object types are used, but only the official ones from the specification. """
if instance['type'] not in enums.TYPES: yield JSONError("Object type '%s' is not one of those defined in the" " specification." % instance['type'], instance['id']) if has_cyber_observable_data(instance): for key, obj in instance['objects'].items(): if 'type' in obj and obj['type'] not in enums.OBSERVABLE_TYPES: yield JSONError("Observable object %s is type '%s' which is " "not one of those defined in the " "specification." % (key, obj['type']), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def properties_strict(instance): """Ensure that no custom properties are used, but only the official ones from the specification. """
if instance['type'] not in enums.TYPES: return # only check properties for official objects defined_props = enums.PROPERTIES.get(instance['type'], []) for prop in instance.keys(): if prop not in defined_props: yield JSONError("Property '%s' is not one of those defined in the" " specification." % prop, instance['id']) if has_cyber_observable_data(instance): for key, obj in instance['objects'].items(): type_ = obj.get('type', '') if type_ not in enums.OBSERVABLE_PROPERTIES: continue # custom observable types handled outside this function observable_props = enums.OBSERVABLE_PROPERTIES.get(type_, []) embedded_props = enums.OBSERVABLE_EMBEDDED_PROPERTIES.get(type_, {}) extensions = enums.OBSERVABLE_EXTENSIONS.get(type_, []) for prop in obj.keys(): if prop not in observable_props: yield JSONError("Property '%s' is not one of those defined in the" " specification for %s objects." % (prop, type_), instance['id']) # Check properties of embedded cyber observable types elif prop in embedded_props: embedded_prop_keys = embedded_props.get(prop, []) for embedded_key in obj[prop]: if isinstance(embedded_key, dict): for embedded in embedded_key: if embedded not in embedded_prop_keys: yield JSONError("Property '%s' is not one of those defined in the" " specification for the %s property in %s objects." % (embedded, prop, type_), instance['id']) elif embedded_key not in embedded_prop_keys: yield JSONError("Property '%s' is not one of those defined in the" " specification for the %s property in %s objects." % (embedded_key, prop, type_), instance['id']) # Check properties of embedded cyber observable types for ext_key in obj.get('extensions', {}): if ext_key not in extensions: continue # don't check custom extensions extension_props = enums.OBSERVABLE_EXTENSION_PROPERTIES[ext_key] for ext_prop in obj['extensions'][ext_key]: if ext_prop not in extension_props: yield JSONError("Property '%s' is not one of those defined in the" " specification for the %s extension in %s objects." 
% (ext_prop, ext_key, type_), instance['id']) embedded_ext_props = enums.OBSERVABLE_EXTENSION_EMBEDDED_PROPERTIES.get(ext_key, {}).get(ext_prop, []) if embedded_ext_props: for embed_ext_prop in obj['extensions'][ext_key].get(ext_prop, []): if embed_ext_prop not in embedded_ext_props: yield JSONError("Property '%s' in the %s property of the %s extension " "is not one of those defined in the specification." % (embed_ext_prop, ext_prop, ext_key), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def char_sets(): """Return a list of the IANA Character Sets, or an empty list if the IANA website is unreachable. Store it as a function attribute so that we only build the list once. """
if not hasattr(char_sets, 'setlist'): clist = [] try: data = requests.get('http://www.iana.org/assignments/character-' 'sets/character-sets-1.csv') except requests.exceptions.RequestException: return [] for line in data.iter_lines(): if line: line = line.decode("utf-8") if line.count(',') > 0: vals = line.split(',') if vals[0]: clist.append(vals[0]) else: clist.append(vals[1]) char_sets.setlist = clist return char_sets.setlist
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def protocols(): """Return a list of values from the IANA Service Name and Transport Protocol Port Number Registry, or an empty list if the IANA website is unreachable. Store it as a function attribute so that we only build the list once. """
if not hasattr(protocols, 'protlist'): plist = [] try: data = requests.get('http://www.iana.org/assignments/service-names' '-port-numbers/service-names-port-numbers.csv') except requests.exceptions.RequestException: return [] for line in data.iter_lines(): if line: line = line.decode("utf-8") if line.count(',') > 0: vals = line.split(',') if vals[0]: plist.append(vals[0]) if len(vals) > 2 and vals[2] and vals[2] not in plist: plist.append(vals[2]) plist.append('ipv4') plist.append('ipv6') plist.append('ssl') plist.append('tls') plist.append('dns') protocols.protlist = plist return protocols.protlist
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_level(log_function, fmt, level, *args): """Print a formatted message to stdout prepended by spaces. Useful for printing hierarchical information, like bullet lists. Note: If the application is running in "Silent Mode" (i.e., ``_SILENT == True``), this function will return immediately and no message will be printed. Args: log_function: The function that will be called to output the formatted message. fmt (str): A Python formatted string. level (int): Used to determing how many spaces to print. The formula is ``' ' * level ``. *args: Variable length list of arguments. Values are plugged into the format string. Examples: TEST 0 TEST 1 TEST 2 """
if _SILENT: return msg = fmt % args spaces = ' ' * level log_function("%s%s" % (spaces, msg))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_fatal_results(results, level=0): """Print fatal errors that occurred during validation runs. """
print_level(logger.critical, _RED + "[X] Fatal Error: %s", level, results.error)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_schema_results(results, level=0): """Print JSON Schema validation errors to stdout. Args: results: An instance of ObjectValidationResults. level: The level at which to print the results. """
for error in results.errors: print_level(logger.error, _RED + "[X] %s", level, error)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_warning_results(results, level=0): """Print warning messages found during validation. """
marker = _YELLOW + "[!] " for warning in results.warnings: print_level(logger.warning, marker + "Warning: %s", level, warning)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_results_header(identifier, is_valid): """Print a header for the results of either a file or an object. """
print_horizontal_rule() print_level(logger.info, "[-] Results for: %s", 0, identifier) if is_valid: marker = _GREEN + "[+]" verdict = "Valid" log_func = logger.info else: marker = _RED + "[X]" verdict = "Invalid" log_func = logger.error print_level(log_func, "%s STIX JSON: %s", 0, marker, verdict)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_object_results(obj_result): """Print the results of validating an object. Args: obj_result: An ObjectValidationResults instance. """
print_results_header(obj_result.object_id, obj_result.is_valid) if obj_result.warnings: print_warning_results(obj_result, 1) if obj_result.errors: print_schema_results(obj_result, 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_file_results(file_result): """Print the results of validating a file. Args: file_result: A FileValidationResults instance. """
print_results_header(file_result.filepath, file_result.is_valid) for object_result in file_result.object_results: if object_result.warnings: print_warning_results(object_result, 1) if object_result.errors: print_schema_results(object_result, 1) if file_result.fatal: print_fatal_results(file_result.fatal, 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def vocab_encryption_algo(instance): """Ensure file objects' 'encryption_algorithm' property is from the encryption-algo-ov vocabulary. """
for key, obj in instance['objects'].items(): if 'type' in obj and obj['type'] == 'file': try: enc_algo = obj['encryption_algorithm'] except KeyError: continue if enc_algo not in enums.ENCRYPTION_ALGO_OV: yield JSONError("Object '%s' has an 'encryption_algorithm' of " "'%s', which is not a value in the " "encryption-algo-ov vocabulary." % (key, enc_algo), instance['id'], 'encryption-algo')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def enforce_relationship_refs(instance): """Ensures that all SDOs being referenced by the SRO are contained within the same bundle"""
if instance['type'] != 'bundle' or 'objects' not in instance: return rel_references = set() """Find and store all ids""" for obj in instance['objects']: if obj['type'] != 'relationship': rel_references.add(obj['id']) """Check if id has been encountered""" for obj in instance['objects']: if obj['type'] == 'relationship': if obj['source_ref'] not in rel_references: yield JSONError("Relationship object %s makes reference to %s " "Which is not found in current bundle " % (obj['id'], obj['source_ref']), 'enforce-relationship-refs') if obj['target_ref'] not in rel_references: yield JSONError("Relationship object %s makes reference to %s " "Which is not found in current bundle " % (obj['id'], obj['target_ref']), 'enforce-relationship-refs')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def timestamp_compare(instance): """Ensure timestamp properties with a comparison requirement are valid. E.g. `modified` must be later or equal to `created`. """
compares = [('modified', 'ge', 'created')] additional_compares = enums.TIMESTAMP_COMPARE.get(instance.get('type', ''), []) compares.extend(additional_compares) for first, op, second in compares: comp = getattr(operator, op) comp_str = get_comparison_string(op) if first in instance and second in instance and \ not comp(instance[first], instance[second]): msg = "'%s' (%s) must be %s '%s' (%s)" yield JSONError(msg % (first, instance[first], comp_str, second, instance[second]), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def observable_timestamp_compare(instance): """Ensure cyber observable timestamp properties with a comparison requirement are valid. """
for key, obj in instance['objects'].items(): compares = enums.TIMESTAMP_COMPARE_OBSERVABLE.get(obj.get('type', ''), []) print(compares) for first, op, second in compares: comp = getattr(operator, op) comp_str = get_comparison_string(op) if first in obj and second in obj and \ not comp(obj[first], obj[second]): msg = "In object '%s', '%s' (%s) must be %s '%s' (%s)" yield JSONError(msg % (key, first, obj[first], comp_str, second, obj[second]), instance['id'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def language_contents(instance): """Ensure keys in Language Content's 'contents' dictionary are valid language codes, and that the keys in the sub-dictionaries match the rules for object property names. """
if instance['type'] != 'language-content' or 'contents' not in instance: return for key, value in instance['contents'].items(): if key not in enums.LANG_CODES: yield JSONError("Invalid key '%s' in 'contents' property must be" " an RFC 5646 code" % key, instance['id']) for subkey, subvalue in value.items(): if not PROPERTY_FORMAT_RE.match(subkey): yield JSONError("'%s' in '%s' of the 'contents' property is " "invalid and must match a valid property name" % (subkey, key), instance['id'], 'observable-dictionary-keys')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cyber_observable_check(original_function): """Decorator for functions that require cyber observable data. """
def new_function(*args, **kwargs): if not has_cyber_observable_data(args[0]): return func = original_function(*args, **kwargs) if isinstance(func, Iterable): for x in original_function(*args, **kwargs): yield x new_function.__name__ = original_function.__name__ return new_function
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_requests_cache(refresh_cache=False): """ Initializes a cache which the ``requests`` library will consult for responses, before making network requests. :param refresh_cache: Whether the cache should be cleared out """
# Cache data from external sources; used in some checks dirs = AppDirs("stix2-validator", "OASIS") # Create cache dir if doesn't exist try: os.makedirs(dirs.user_cache_dir) except OSError as e: if e.errno != errno.EEXIST: raise requests_cache.install_cache( cache_name=os.path.join(dirs.user_cache_dir, 'py{}cache'.format( sys.version_info[0])), expire_after=datetime.timedelta(weeks=1)) if refresh_cache: clear_requests_cache()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def parse(self, parser): '''parse content of extension''' # line number of token that started the tag lineno = next(parser.stream).lineno # template context context = nodes.ContextReference() # parse keyword arguments kwargs = [] while parser.stream.look().type == lexer.TOKEN_ASSIGN: key = parser.stream.expect(lexer.TOKEN_NAME) next(parser.stream) kwargs.append( nodes.Keyword(key.value, parser.parse_expression()), ) parser.stream.skip_if('comma') # parse content of the activeurl block up to endactiveurl body = parser.parse_statements(['name:endactiveurl'], drop_needle=True) args = [context] call_method = self.call_method( 'render_tag', args=args, kwargs=kwargs, ) return nodes.CallBlock(call_method, [], [], body).set_lineno(lineno)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_cache_key(content, **kwargs): '''generate cache key''' cache_key = '' for key in sorted(kwargs.keys()): cache_key = '{cache_key}.{key}:{value}'.format( cache_key=cache_key, key=key, value=kwargs[key], ) cache_key = '{content}{cache_key}'.format( content=content, cache_key=cache_key, ) # fix for non ascii symbols, ensure encoding, python3 hashlib fix cache_key = cache_key.encode('utf-8', 'ignore') cache_key = md5(cache_key).hexdigest() cache_key = '{prefix}.{version}.{language}.{cache_key}'.format( prefix=settings.ACTIVE_URL_CACHE_PREFIX, version=__version__, language=get_language(), cache_key=cache_key ) return cache_key
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def check_active(url, element, **kwargs):
    '''Check whether *url* is "active" for the current request and, if
    so, append the configured css class to *element*.

    :param url: lxml ``<a>`` element whose ``href`` is inspected
    :param element: lxml element that receives the css class
    :param kwargs: ``menu``, ``ignore_params``, ``full_path``,
        ``css_class`` options from the template tag
    :returns: True when the css class was applied, otherwise False
    '''
    menu = yesno_to_bool(kwargs['menu'], 'menu')
    ignore_params = yesno_to_bool(kwargs['ignore_params'], 'ignore_params')

    # only anchors that actually carry an href can be "active"
    # (fixed E714: was ``not url.attrib.get('href', None) is None``)
    if url.attrib.get('href', None) is not None:
        # get href attribute
        href = url.attrib['href'].strip()
        # href="#" is often used when links shouldn't be handled by browsers.
        # For example, Bootstrap uses this for expandable menus on
        # small screens, see
        # https://getbootstrap.com/docs/4.0/components/navs/#using-dropdowns
        if href == '#':
            return False
        # split into urlparse object
        href = urlparse.urlsplit(href)
        # cut off hashtag (anchor)
        href = href._replace(fragment='')
        # cut off get params (?key=var&etc=var2)
        if ignore_params:
            href = href._replace(query='')
            kwargs['full_path'] = urlparse.urlunsplit(
                urlparse.urlsplit(
                    kwargs['full_path']
                )._replace(query='')
            )
        # build urlparse object back into string
        href = urlparse.urlunsplit(href)
        # check empty href
        if href == '':
            # replace href with current location
            href = kwargs['full_path']
        # compare full_path with href according to menu configuration
        if menu:
            # try mark "root" (/) url as "active", in equals way
            if href == '/' == kwargs['full_path']:
                logic = True
            # skip "root" (/) url, otherwise it will be always "active"
            elif href != '/':
                # prefix match, also trying urlquoted variants since the
                # template may contain either quoted or unquoted hrefs
                logic = (
                    kwargs['full_path'].startswith(href)
                    or
                    # maybe an urlquoted href was supplied
                    urlquote(kwargs['full_path']).startswith(href)
                    or
                    kwargs['full_path'].startswith(urlquote(href))
                )
            else:
                logic = False
        else:
            # equals logic
            logic = (
                kwargs['full_path'] == href
                or
                # maybe an urlquoted href was supplied
                urlquote(kwargs['full_path']) == href
                or
                kwargs['full_path'] == urlquote(href)
            )
        # "active" url found
        if logic:
            # check parent tag has "class" attribute or it is empty
            if element.attrib.get('class'):
                # prevent multiple "class" attribute adding
                if kwargs['css_class'] not in element.attrib['class']:
                    # append "active" class
                    element.attrib['class'] += ' {css_class}'.format(
                        css_class=kwargs['css_class'],
                    )
            else:
                # create or set (if empty) "class" attribute
                element.attrib['class'] = kwargs['css_class']
            return True
    # no "active" urls found
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def check_content(content, **kwargs): '''check content for "active" urls''' # valid html root tag try: # render elements tree from content tree = fragment_fromstring(content) # flag for prevent content rerendering, when no "active" urls found processed = False # django > 1.5 template boolean\None variables feature if isinstance(kwargs['parent_tag'], bool): if not kwargs['parent_tag']: kwargs['parent_tag'] = 'self' else: raise ImproperlyConfigured(''' parent_tag=True is not allowed ''') elif kwargs['parent_tag'] is None: kwargs['parent_tag'] = 'self' # if parent_tag is False\None\''\a\self # "active" status will be applied directly to "<a>" if kwargs['parent_tag'].lower() in ('a', 'self', ''): # xpath query to get all "<a>" urls = tree.xpath('.//a') # check "active" status for all urls for url in urls: if check_active(url, url, **kwargs): # mark flag for rerendering content processed = True # otherwise css_class must be applied to parent_tag else: # xpath query to get all parent tags elements = tree.xpath('.//{parent_tag}'.format( parent_tag=kwargs['parent_tag'], )) # check all elements for "active" "<a>" for element in elements: # xpath query to get all "<a>" urls = element.xpath('.//a') # check "active" status for all urls for url in urls: if check_active(url, element, **kwargs): # flag for rerendering content tree processed = True # stop checking other "<a>" break # do not rerender content if no "active" urls found if processed: # render content from tree return tostring(tree, encoding='unicode') # not valid html root tag except ParserError: # raise an exception with configuration example raise ImproperlyConfigured(''' content of {% activeurl %} must have valid html root tag for example {% activeurl %} <ul> <li> <a href="/page/">page</a> </li> <li> <a href="/other_page/">other_page</a> </li> </ul> {% endactiveurl %} in this case <ul> is valid content root tag ''') return content
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def render_content(content, **kwargs):
    '''Apply "active" url logic to *content*, memoised in django cache.

    When caching is enabled a previously rendered result is returned
    directly; otherwise the content is processed and the result stored.
    '''
    if settings.ACTIVE_URL_CACHE:
        cache_key = get_cache_key(content, **kwargs)
        cached = cache.get(cache_key)
        # short-circuit on a warm cache
        if cached is not None:
            return cached
    rendered = check_content(content, **kwargs)
    if settings.ACTIVE_URL_CACHE:
        cache.set(cache_key, rendered, settings.ACTIVE_URL_CACHE_TIMEOUT)
    return rendered
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def load_configuration(self, **kwargs):
    '''Merge passed options with the default settings onto *self*.'''
    # fall back to the configured default for every option the tag
    # did not pass explicitly
    for key, default in settings.ACTIVE_URL_KWARGS.items():
        kwargs.setdefault(key, default)
    # "active" html tag css class
    self.css_class = kwargs['css_class']
    # html tag that receives the css class
    self.parent_tag = kwargs['parent_tag']
    # flipper for menu (prefix-match) support
    self.menu = kwargs['menu']
    # whether to ignore / chomp get_params
    self.ignore_params = kwargs['ignore_params']
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_response(response, clazz, is_list=False, resource_name=None): """Parse a Marathon response into an object or list of objects."""
def _parse_response(response, clazz, is_list=False, resource_name=None):
    """Parse a Marathon response into an object or list of objects.

    :param response: requests Response with a JSON body
    :param clazz: model class providing ``from_json``
    :param bool is_list: deserialize a list instead of a single object
    :param str resource_name: optional top-level key to descend into
    """
    payload = response.json()
    # descend into the named sub-resource when one was requested
    if resource_name:
        payload = payload[resource_name]
    if is_list:
        return [clazz.from_json(item) for item in payload]
    return clazz.from_json(payload)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _do_request(self, method, path, params=None, data=None): """Query Marathon server."""
        # JSON in, JSON out; an optional auth token rides in the header
        headers = {
            'Content-Type': 'application/json', 'Accept': 'application/json'}
        if self.auth_token:
            headers['Authorization'] = "token={}".format(self.auth_token)
        # try each configured server in order until one of them answers
        response = None
        servers = list(self.servers)
        while servers and response is None:
            server = servers.pop(0)
            url = ''.join([server.rstrip('/'), path])
            try:
                response = self.session.request(
                    method, url, params=params, data=data, headers=headers,
                    auth=self.auth, timeout=self.timeout, verify=self.verify)
                marathon.log.info('Got response from %s', server)
            except requests.exceptions.RequestException as e:
                # transport-level failure: log and fall through to the
                # next server (response stays None)
                marathon.log.error(
                    'Error while calling %s: %s', url, str(e))
        if response is None:
            raise NoResponseError('No remaining Marathon servers to try')
        # map HTTP status classes onto the library's exception hierarchy
        if response.status_code >= 500:
            marathon.log.error('Got HTTP {code}: {body}'.format(
                code=response.status_code, body=response.text.encode('utf-8')))
            raise InternalServerError(response)
        elif response.status_code >= 400:
            marathon.log.error('Got HTTP {code}: {body}'.format(
                code=response.status_code, body=response.text.encode('utf-8')))
            if response.status_code == 404:
                raise NotFoundError(response)
            elif response.status_code == 409:
                raise ConflictError(response)
            else:
                raise MarathonHttpError(response)
        elif response.status_code >= 300:
            # redirects and other 3xx are only logged, not raised
            marathon.log.warn('Got HTTP {code}: {body}'.format(
                code=response.status_code, body=response.text.encode('utf-8')))
        else:
            marathon.log.debug('Got HTTP {code}: {body}'.format(
                code=response.status_code, body=response.text.encode('utf-8')))
        return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _do_sse_request(self, path, params=None): """Query Marathon server for events."""
def _do_sse_request(self, path, params=None):
    """Query Marathon server for events (server-sent events stream).

    :param str path: API path to stream from
    :param dict params: optional query parameters
    :returns: iterator over raw event lines
    :raises MarathonError: when no server produced a usable response
    """
    urls = [''.join([server.rstrip('/'), path]) for server in self.servers]
    while urls:
        url = urls.pop()
        try:
            # Requests does not set the original Authorization header on cross origin
            # redirects. If set allow_redirects=True we may get a 401 response.
            response = self.sse_session.get(
                url,
                params=params,
                stream=True,
                headers={'Accept': 'text/event-stream'},
                auth=self.auth,
                verify=self.verify,
                allow_redirects=False
            )
        except Exception as e:
            # Bug fix: exceptions have no ``.message`` attribute on
            # Python 3, so the old handler itself raised AttributeError;
            # format the exception portably instead.
            marathon.log.error(
                'Error while calling %s: %s', url, str(e))
        else:
            if response.is_redirect and response.next:
                # follow the redirect manually so auth is preserved
                urls.append(response.next.url)
                marathon.log.debug("Got redirect to {}".format(response.next.url))
            elif response.ok:
                return response.iter_lines()
    raise MarathonError('No remaining Marathon servers to try')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_app(self, app_id, app, minimal=True): """Create and start an app. :param str app_id: application ID :param :class:`marathon.models.app.MarathonApp` app: the application to create :param bool minimal: ignore nulls and empty collections :returns: the created app (on success) :rtype: :class:`marathon.models.app.MarathonApp` or False """
def create_app(self, app_id, app, minimal=True):
    """Create and start an app.

    :param str app_id: application ID
    :param app: the application to create
    :type app: :class:`marathon.models.app.MarathonApp`
    :param bool minimal: ignore nulls and empty collections

    :returns: the created app on success, otherwise False
    :rtype: :class:`marathon.models.app.MarathonApp` or bool
    """
    app.id = app_id
    payload = app.to_json(minimal=minimal)
    resp = self._do_request('POST', '/v2/apps', data=payload)
    # 201 Created -> parse the app Marathon echoed back
    if resp.status_code != 201:
        return False
    return self._parse_response(resp, MarathonApp)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_apps(self, cmd=None, embed_tasks=False, embed_counts=False, embed_deployments=False, embed_readiness=False, embed_last_task_failure=False, embed_failures=False, embed_task_stats=False, app_id=None, label=None, **kwargs): """List all apps. :param str cmd: if passed, only show apps with a matching `cmd` :param bool embed_tasks: embed tasks in result :param bool embed_counts: embed all task counts :param bool embed_deployments: embed all deployment identifier :param bool embed_readiness: embed all readiness check results :param bool embed_last_task_failure: embeds the last task failure :param bool embed_failures: shorthand for embed_last_task_failure :param bool embed_task_stats: embed task stats in result :param str app_id: if passed, only show apps with an 'id' that matches or contains this value :param str label: if passed, only show apps with the selected labels :param kwargs: arbitrary search filters :returns: list of applications :rtype: list[:class:`marathon.models.app.MarathonApp`] """
def list_apps(self, cmd=None, embed_tasks=False, embed_counts=False,
              embed_deployments=False, embed_readiness=False,
              embed_last_task_failure=False, embed_failures=False,
              embed_task_stats=False, app_id=None, label=None, **kwargs):
    """List all apps, optionally filtered.

    :param str cmd: if passed, only show apps with a matching `cmd`
    :param bool embed_tasks: embed tasks in result
    :param bool embed_counts: embed all task counts
    :param bool embed_deployments: embed all deployment identifier
    :param bool embed_readiness: embed all readiness check results
    :param bool embed_last_task_failure: embeds the last task failure
    :param bool embed_failures: shorthand for embed_last_task_failure
    :param bool embed_task_stats: embed task stats in result
    :param str app_id: if passed, only show apps whose 'id' matches
    :param str label: if passed, only show apps with the selected labels
    :param kwargs: arbitrary search filters applied client-side

    :returns: list of applications
    :rtype: list[:class:`marathon.models.app.MarathonApp`]
    """
    params = {}
    # server-side filters, only sent when truthy
    for name, value in (('cmd', cmd), ('id', app_id), ('label', label)):
        if value:
            params[name] = value
    embed_params = {
        'app.tasks': embed_tasks,
        'app.counts': embed_counts,
        'app.deployments': embed_deployments,
        'app.readiness': embed_readiness,
        'app.lastTaskFailure': embed_last_task_failure,
        'app.failures': embed_failures,
        'app.taskStats': embed_task_stats
    }
    requested_embeds = [k for k, enabled in embed_params.items() if enabled]
    if requested_embeds:
        params['embed'] = requested_embeds
    response = self._do_request('GET', '/v2/apps', params=params)
    apps = self._parse_response(
        response, MarathonApp, is_list=True, resource_name='apps')
    # arbitrary attribute filters, applied after deserialization
    for attr, expected in kwargs.items():
        apps = [a for a in apps if getattr(a, attr) == expected]
    return apps
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_app(self, app_id, embed_tasks=False, embed_counts=False, embed_deployments=False, embed_readiness=False, embed_last_task_failure=False, embed_failures=False, embed_task_stats=False): """Get a single app. :param str app_id: application ID :param bool embed_tasks: embed tasks in result :param bool embed_counts: embed all task counts :param bool embed_deployments: embed all deployment identifier :param bool embed_readiness: embed all readiness check results :param bool embed_last_task_failure: embeds the last task failure :param bool embed_failures: shorthand for embed_last_task_failure :param bool embed_task_stats: embed task stats in result :returns: application :rtype: :class:`marathon.models.app.MarathonApp` """
def get_app(self, app_id, embed_tasks=False, embed_counts=False,
            embed_deployments=False, embed_readiness=False,
            embed_last_task_failure=False, embed_failures=False,
            embed_task_stats=False):
    """Get a single app.

    :param str app_id: application ID
    :param bool embed_tasks: embed tasks in result
    :param bool embed_counts: embed all task counts
    :param bool embed_deployments: embed all deployment identifier
    :param bool embed_readiness: embed all readiness check results
    :param bool embed_last_task_failure: embeds the last task failure
    :param bool embed_failures: shorthand for embed_last_task_failure
    :param bool embed_task_stats: embed task stats in result

    :returns: application
    :rtype: :class:`marathon.models.app.MarathonApp`
    """
    embed_params = {
        'app.tasks': embed_tasks,
        'app.counts': embed_counts,
        'app.deployments': embed_deployments,
        'app.readiness': embed_readiness,
        'app.lastTaskFailure': embed_last_task_failure,
        'app.failures': embed_failures,
        'app.taskStats': embed_task_stats
    }
    # only send the embed names the caller switched on
    requested_embeds = [k for k, enabled in embed_params.items() if enabled]
    params = {'embed': requested_embeds} if requested_embeds else {}
    response = self._do_request(
        'GET', '/v2/apps/{app_id}'.format(app_id=app_id), params=params)
    return self._parse_response(response, MarathonApp, resource_name='app')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_app(self, app_id, app, force=False, minimal=True): """Update an app. Applies writable settings in `app` to `app_id` Note: this method can not be used to rename apps. :param str app_id: target application ID :param app: application settings :type app: :class:`marathon.models.app.MarathonApp` :param bool force: apply even if a deployment is in progress :param bool minimal: ignore nulls and empty collections :returns: a dict containing the deployment id and version :rtype: dict """
def update_app(self, app_id, app, force=False, minimal=True):
    """Update an app: apply writable settings in *app* to *app_id*.

    Note: this method can not be used to rename apps.

    :param str app_id: target application ID
    :param app: application settings
    :type app: :class:`marathon.models.app.MarathonApp`
    :param bool force: apply even if a deployment is in progress
    :param bool minimal: ignore nulls and empty collections

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    # Changes won't take if version is set - blank it for convenience
    app.version = None
    response = self._do_request(
        'PUT',
        '/v2/apps/{app_id}'.format(app_id=app_id),
        params={'force': force},
        data=app.to_json(minimal=minimal))
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_apps(self, apps, force=False, minimal=True): """Update multiple apps. Applies writable settings in elements of apps either by upgrading existing ones or creating new ones :param apps: sequence of application settings :param bool force: apply even if a deployment is in progress :param bool minimal: ignore nulls and empty collections :returns: a dict containing the deployment id and version :rtype: dict """
def update_apps(self, apps, force=False, minimal=True):
    """Update multiple apps, upgrading existing ones or creating new ones.

    :param apps: sequence of application settings
    :param bool force: apply even if a deployment is in progress
    :param bool minimal: ignore nulls and empty collections

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    payload = []
    for app in apps:
        # Changes won't take if version is set - blank it for convenience
        app.version = None
        payload.append(app.json_repr(minimal=minimal))
    # minimal mode drops nulls/empties during encoding
    encoder = MarathonMinimalJsonEncoder if minimal else MarathonJsonEncoder
    body = json.dumps(payload, cls=encoder, sort_keys=True)
    response = self._do_request(
        'PUT', '/v2/apps', params={'force': force}, data=body)
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def rollback_app(self, app_id, version, force=False): """Roll an app back to a previous version. :param str app_id: application ID :param str version: application version :param bool force: apply even if a deployment is in progress :returns: a dict containing the deployment id and version :rtype: dict """
def rollback_app(self, app_id, version, force=False):
    """Roll an app back to a previous version.

    :param str app_id: application ID
    :param str version: application version
    :param bool force: apply even if a deployment is in progress

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    body = json.dumps({'version': version})
    response = self._do_request(
        'PUT',
        '/v2/apps/{app_id}'.format(app_id=app_id),
        params={'force': force},
        data=body)
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_app(self, app_id, force=False): """Stop and destroy an app. :param str app_id: application ID :param bool force: apply even if a deployment is in progress :returns: a dict containing the deployment id and version :rtype: dict """
def delete_app(self, app_id, force=False):
    """Stop and destroy an app.

    :param str app_id: application ID
    :param bool force: apply even if a deployment is in progress

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    response = self._do_request(
        'DELETE',
        '/v2/apps/{app_id}'.format(app_id=app_id),
        params={'force': force})
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scale_app(self, app_id, instances=None, delta=None, force=False): """Scale an app. Scale an app to a target number of instances (with `instances`), or scale the number of instances up or down by some delta (`delta`). If the resulting number of instances would be negative, desired instances will be set to zero. If both `instances` and `delta` are passed, use `instances`. :param str app_id: application ID :param int instances: [optional] the number of instances to scale to :param int delta: [optional] the number of instances to scale up or down by :param bool force: apply even if a deployment is in progress :returns: a dict containing the deployment id and version :rtype: dict """
def scale_app(self, app_id, instances=None, delta=None, force=False):
    """Scale an app to a target (*instances*) or by a delta (*delta*).

    If both are passed, *instances* wins. Logs and returns None when
    neither is passed or the app does not exist.

    :param str app_id: application ID
    :param int instances: [optional] the number of instances to scale to
    :param int delta: [optional] the number of instances to scale up or down by
    :param bool force: apply even if a deployment is in progress

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    if instances is None and delta is None:
        marathon.log.error('instances or delta must be passed')
        return
    try:
        app = self.get_app(app_id)
    except NotFoundError:
        marathon.log.error('App "{app}" not found'.format(app=app_id))
        return
    # an absolute target always wins over a relative delta
    target = app.instances + delta if instances is None else instances
    return self.update_app(
        app.id, MarathonApp(instances=target), force=force)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_group(self, group): """Create and start a group. :param :class:`marathon.models.group.MarathonGroup` group: the group to create :returns: success :rtype: dict containing the version ID """
def create_group(self, group):
    """Create and start a group.

    :param group: the group to create
    :type group: :class:`marathon.models.group.MarathonGroup`

    :returns: dict containing the version ID on success
    :rtype: dict
    """
    response = self._do_request('POST', '/v2/groups', data=group.to_json())
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_group(self, group_id): """Get a single group. :param str group_id: group ID :returns: group :rtype: :class:`marathon.models.group.MarathonGroup` """
def get_group(self, group_id):
    """Get a single group.

    :param str group_id: group ID

    :returns: group
    :rtype: :class:`marathon.models.group.MarathonGroup`
    """
    path = '/v2/groups/{group_id}'.format(group_id=group_id)
    response = self._do_request('GET', path)
    return self._parse_response(response, MarathonGroup)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_group(self, group_id, group, force=False, minimal=True): """Update a group. Applies writable settings in `group` to `group_id` Note: this method can not be used to rename groups. :param str group_id: target group ID :param group: group settings :type group: :class:`marathon.models.group.MarathonGroup` :param bool force: apply even if a deployment is in progress :param bool minimal: ignore nulls and empty collections :returns: a dict containing the deployment id and version :rtype: dict """
def update_group(self, group_id, group, force=False, minimal=True):
    """Update a group: apply writable settings in *group* to *group_id*.

    Note: this method can not be used to rename groups.

    :param str group_id: target group ID
    :param group: group settings
    :type group: :class:`marathon.models.group.MarathonGroup`
    :param bool force: apply even if a deployment is in progress
    :param bool minimal: ignore nulls and empty collections

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    # Changes won't take if version is set - blank it for convenience
    group.version = None
    path = '/v2/groups/{group_id}'.format(group_id=group_id)
    response = self._do_request(
        'PUT', path,
        data=group.to_json(minimal=minimal),
        params={'force': force})
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def rollback_group(self, group_id, version, force=False): """Roll a group back to a previous version. :param str group_id: group ID :param str version: group version :param bool force: apply even if a deployment is in progress :returns: a dict containing the deployment id and version :rtype: dict """
def rollback_group(self, group_id, version, force=False):
    """Roll a group back to a previous version.

    :param str group_id: group ID
    :param str version: group version
    :param bool force: apply even if a deployment is in progress

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    path = '/v2/groups/{group_id}/versions/{version}'.format(
        group_id=group_id, version=version)
    response = self._do_request('PUT', path, params={'force': force})
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_group(self, group_id, force=False): """Stop and destroy a group. :param str group_id: group ID :param bool force: apply even if a deployment is in progress :returns: a dict containing the deleted version :rtype: dict """
def delete_group(self, group_id, force=False):
    """Stop and destroy a group.

    :param str group_id: group ID
    :param bool force: apply even if a deployment is in progress

    :returns: a dict containing the deleted version
    :rtype: dict
    """
    path = '/v2/groups/{group_id}'.format(group_id=group_id)
    response = self._do_request('DELETE', path, params={'force': force})
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scale_group(self, group_id, scale_by): """Scale a group by a factor. :param str group_id: group ID :param int scale_by: factor to scale by :returns: a dict containing the deployment id and version :rtype: dict """
def scale_group(self, group_id, scale_by):
    """Scale a group by a factor.

    :param str group_id: group ID
    :param int scale_by: factor to scale by

    :returns: a dict containing the deployment id and version
    :rtype: dict
    """
    path = '/v2/groups/{group_id}'.format(group_id=group_id)
    response = self._do_request(
        'PUT', path, data=json.dumps({'scaleBy': scale_by}))
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_tasks(self, app_id=None, **kwargs): """List running tasks, optionally filtered by app_id. :param str app_id: if passed, only show tasks for this application :param kwargs: arbitrary search filters :returns: list of tasks :rtype: list[:class:`marathon.models.task.MarathonTask`] """
def list_tasks(self, app_id=None, **kwargs):
    """List running tasks, optionally filtered by app_id.

    :param str app_id: if passed, only show tasks for this application
    :param kwargs: arbitrary search filters applied client-side

    :returns: list of tasks
    :rtype: list[:class:`marathon.models.task.MarathonTask`]
    """
    response = self._do_request(
        'GET', '/v2/apps/%s/tasks' % app_id if app_id else '/v2/tasks')
    tasks = self._parse_response(
        response, MarathonTask, is_list=True, resource_name='tasks')
    # Backfill app_id on tasks that lack it. Previously done with a
    # list comprehension used purely for its setattr side effects,
    # which built a throwaway list of Nones; a plain loop is the
    # idiomatic form.
    if app_id:
        for task in tasks:
            if task.app_id is None:
                task.app_id = app_id
    for attr, expected in kwargs.items():
        tasks = [t for t in tasks if getattr(t, attr) == expected]
    return tasks
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kill_given_tasks(self, task_ids, scale=False, force=None): """Kill a list of given tasks. :param list[str] task_ids: tasks to kill :param bool scale: if true, scale down the app by the number of tasks killed :param bool force: if true, ignore any current running deployments :return: True on success :rtype: bool """
def kill_given_tasks(self, task_ids, scale=False, force=None):
    """Kill a list of given tasks.

    :param list[str] task_ids: tasks to kill
    :param bool scale: if true, scale down the app by the number of tasks killed
    :param bool force: if true, ignore any current running deployments

    :return: True on success
    :rtype: bool
    """
    params = {'scale': scale}
    if force is not None:
        params['force'] = force
    data = json.dumps({"ids": task_ids})
    response = self._do_request(
        'POST', '/v2/tasks/delete', params=params, data=data)
    # Bug fix: the previous code compared the Response object itself to
    # 200 (``response == 200``), which is always False; compare the
    # HTTP status code instead.
    return response.status_code == 200
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kill_tasks(self, app_id, scale=False, wipe=False, host=None, batch_size=0, batch_delay=0): """Kill all tasks belonging to app. :param str app_id: application ID :param bool scale: if true, scale down the app by the number of tasks killed :param str host: if provided, only terminate tasks on this Mesos slave :param int batch_size: if non-zero, terminate tasks in groups of this size :param int batch_delay: time (in seconds) to wait in between batched kills. If zero, automatically determine :returns: list of killed tasks :rtype: list[:class:`marathon.models.task.MarathonTask`] """
def kill_tasks(self, app_id, scale=False,
               wipe=False, host=None, batch_size=0,
               batch_delay=0):
    """Kill all tasks belonging to app.

    :param str app_id: application ID
    :param bool scale: if true, scale down the app by the number of tasks killed
    :param bool wipe: if true, forget the tasks permanently
    :param str host: if provided, only terminate tasks on this Mesos slave
    :param int batch_size: if non-zero, terminate tasks in groups of this size
    :param int batch_delay: time (in seconds) to wait in between batched kills.
                            If zero, automatically determine

    :returns: list of killed tasks
    :rtype: list[:class:`marathon.models.task.MarathonTask`]
    """
    def batch(iterable, size):
        # Yield chained iterators of at most `size` items each.
        sourceiter = iter(iterable)
        while True:
            batchiter = itertools.islice(sourceiter, size)
            try:
                first = next(batchiter)
            except StopIteration:
                # Bug fix (PEP 479): letting StopIteration escape a
                # generator raises RuntimeError on Python 3.7+; end the
                # generator explicitly instead.
                return
            yield itertools.chain([first], batchiter)

    if batch_size == 0:
        # Terminate all at once
        params = {'scale': scale, 'wipe': wipe}
        if host:
            params['host'] = host
        response = self._do_request(
            'DELETE', '/v2/apps/{app_id}/tasks'.format(app_id=app_id), params)
        # Marathon is inconsistent about what type of object it returns on the multi
        # task deletion endpoint, depending on the version of Marathon. See:
        # https://github.com/mesosphere/marathon/blob/06a6f763a75fb6d652b4f1660685ae234bd15387/src/main/scala/mesosphere/marathon/api/v2/AppTasksResource.scala#L88-L95
        if "tasks" in response.json():
            return self._parse_response(
                response, MarathonTask, is_list=True, resource_name='tasks')
        else:
            return response.json()
    else:
        # Terminate in batches
        tasks = self.list_tasks(
            app_id, host=host) if host else self.list_tasks(app_id)
        for tbatch in batch(tasks, batch_size):
            killed_tasks = [self.kill_task(app_id, t.id, scale=scale, wipe=wipe)
                            for t in tbatch]
            # Pause until the tasks have been killed to avoid race
            # conditions
            killed_task_ids = set(t.id for t in killed_tasks)
            running_task_ids = killed_task_ids
            while killed_task_ids.intersection(running_task_ids):
                time.sleep(1)
                running_task_ids = set(
                    t.id for t in self.get_app(app_id).tasks)
            if batch_delay == 0:
                # Pause until the replacement tasks are healthy
                desired_instances = self.get_app(app_id).instances
                running_instances = 0
                # NOTE(review): counting tasks whose started_at is None as
                # "running" looks inverted — confirm upstream intent before
                # changing; behavior preserved here.
                while running_instances < desired_instances:
                    time.sleep(1)
                    running_instances = sum(
                        t.started_at is None for t in self.get_app(app_id).tasks)
            else:
                time.sleep(batch_delay)
        return tasks
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kill_task(self, app_id, task_id, scale=False, wipe=False): """Kill a task. :param str app_id: application ID :param str task_id: the task to kill :param bool scale: if true, scale down the app by one if the task exists :returns: the killed task :rtype: :class:`marathon.models.task.MarathonTask` """
def kill_task(self, app_id, task_id, scale=False, wipe=False):
    """Kill a task.

    :param str app_id: application ID
    :param str task_id: the task to kill
    :param bool scale: if true, scale down the app by one if the task exists
    :param bool wipe: if true, forget the task permanently

    :returns: the killed task
    :rtype: :class:`marathon.models.task.MarathonTask`
    """
    response = self._do_request(
        'DELETE',
        '/v2/apps/{app_id}/tasks/{task_id}'.format(
            app_id=app_id, task_id=task_id),
        {'scale': scale, 'wipe': wipe})
    # Marathon is inconsistent about what type of object it returns on the multi
    # task deletion endpoint, depending on the version of Marathon. See:
    # https://github.com/mesosphere/marathon/blob/06a6f763a75fb6d652b4f1660685ae234bd15387/src/main/scala/mesosphere/marathon/api/v2/AppTasksResource.scala#L88-L95
    payload = response.json()
    if "task" in payload:
        return self._parse_response(
            response, MarathonTask, is_list=False, resource_name='task')
    return payload
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_versions(self, app_id): """List the versions of an app. :param str app_id: application ID :returns: list of versions :rtype: list[str] """
def list_versions(self, app_id):
    """List the versions of an app.

    :param str app_id: application ID

    :returns: list of version identifiers
    :rtype: list[str]
    """
    response = self._do_request(
        'GET', '/v2/apps/{app_id}/versions'.format(app_id=app_id))
    # The identity list comprehension here was a needless element-wise
    # copy; the parsed JSON value is already a fresh list.
    return response.json()['versions']
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_version(self, app_id, version): """Get the configuration of an app at a specific version. :param str app_id: application ID :param str version: application version :return: application configuration :rtype: :class:`marathon.models.app.MarathonApp` """
def get_version(self, app_id, version):
    """Get the configuration of an app at a specific version.

    :param str app_id: application ID
    :param str version: application version

    :return: application configuration
    :rtype: :class:`marathon.models.app.MarathonApp`
    """
    path = '/v2/apps/{app_id}/versions/{version}'.format(
        app_id=app_id, version=version)
    response = self._do_request('GET', path)
    return MarathonApp.from_json(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_event_subscription(self, url): """Register a callback URL as an event subscriber. :param str url: callback URL :returns: the created event subscription :rtype: dict """
def create_event_subscription(self, url):
    """Register a callback URL as an event subscriber.

    :param str url: callback URL

    :returns: the created event subscription
    :rtype: dict
    """
    response = self._do_request(
        'POST', '/v2/eventSubscriptions', {'callbackUrl': url})
    return response.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_event_subscription(self, url): """Deregister a callback URL as an event subscriber. :param str url: callback URL :returns: the deleted event subscription :rtype: dict """
def delete_event_subscription(self, url):
    """Deregister a callback URL as an event subscriber.

    :param str url: callback URL

    :returns: the deleted event subscription
    :rtype: dict
    """
    response = self._do_request(
        'DELETE', '/v2/eventSubscriptions', {'callbackUrl': url})
    return response.json()