text_prompt: string (100 to 17.7k characters)
code_prompt: string (7 to 9.86k characters)
<SYSTEM_TASK:> Create instrument metadata object from csv. <END_TASK> <USER_TASK:> Description: def from_csv(cls, name=None, col_names=None, sep=None, **kwargs): """Create instrument metadata object from csv. Parameters ---------- name : string absolute filename for csv file or name of file stored in pysat instruments location col_names : list-like collection of strings column names in csv and resultant meta object sep : string column separator for supplied csv filename Note ---- column names must include at least ['name', 'long_name', 'units'], assumed if col_names is None. """
import os
import pysat
import pandas as pds

req_names = ['name', 'long_name', 'units']
if col_names is None:
    col_names = req_names
elif not all([i in col_names for i in req_names]):
    raise ValueError('col_names must include name, long_name, units.')

if sep is None:
    sep = ','

if name is None:
    raise ValueError('Must supply an instrument name or file path.')
elif not isinstance(name, str):
    raise ValueError('keyword name must be a string')
elif not os.path.isfile(name):
    # Not a real file, assume input is a pysat instrument name
    # and look in the standard pysat location.
    test = os.path.join(pysat.__path__[0], 'instruments', name)
    if os.path.isfile(test):
        name = test
    else:
        # try to form an absolute path instead
        test = os.path.abspath(name)
        if not os.path.isfile(test):
            raise ValueError("Unable to create valid file path.")
        else:
            # success
            name = test

mdata = pds.read_csv(name, names=col_names, sep=sep, **kwargs)

if not mdata.empty:
    # make sure the data name is the index
    mdata.index = mdata['name']
    del mdata['name']
    return cls(metadata=mdata)
else:
    raise ValueError('Unable to retrieve information from ' + name)
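A usage sketch, assuming this classmethod lives on pysat's `Meta` class; the file name and column layout are illustrative only:

import pysat

meta = pysat.Meta.from_csv(name='my_meta.csv',
                           col_names=['name', 'long_name', 'units'],
                           sep=',')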
<SYSTEM_TASK:> A signal receiver decorator that fetches the complete instance from the db when <END_TASK> <USER_TASK:> Description: def nonraw_instance(receiver): """ A signal receiver decorator that fetches the complete instance from the db when it's passed as raw """
@wraps(receiver) def wrapper(sender, instance, raw, using, **kwargs): if raw: instance = sender._default_manager.using(using).get(pk=instance.pk) return receiver(sender=sender, raw=raw, instance=instance, using=using, **kwargs) return wrapper
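A sketch of applying the decorator to a Django `post_save` receiver; `Article` and `update_search_index` are hypothetical names:

from django.db.models.signals import post_save
from django.dispatch import receiver

@receiver(post_save, sender=Article)
@nonraw_instance
def refresh_index(sender, instance, raw, using, **kwargs):
    # `instance` is always a fully loaded object here, even during
    # fixture loading when the signal fires with raw=True.
    instance.update_search_index()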
<SYSTEM_TASK:> This is used to pass data required for deletion to the post_delete <END_TASK> <USER_TASK:> Description: def base_definition_pre_delete(sender, instance, **kwargs): """ This is used to pass data required for deletion to the post_delete signal that is no longer available thereafter. """
# see CASCADE_MARK_ORIGIN's docstring cascade_deletion_origin = popattr( instance._state, '_cascade_deletion_origin', None ) if cascade_deletion_origin == 'model_def': return if (instance.base and issubclass(instance.base, models.Model) and instance.base._meta.abstract): instance._state._deletion = instance.model_def.model_class().render_state()
<SYSTEM_TASK:> Make sure to delete fields inherited from an abstract model base. <END_TASK> <USER_TASK:> Description: def base_definition_post_delete(sender, instance, **kwargs): """ Make sure to delete fields inherited from an abstract model base. """
if hasattr(instance._state, '_deletion'): # Make sure to flatten abstract bases since Django # migrations can't deal with them. model = popattr(instance._state, '_deletion') for field in instance.base._meta.fields: perform_ddl('remove_field', model, field)
<SYSTEM_TASK:> When proxy field definitions are loaded from a fixture they don't <END_TASK> <USER_TASK:> Description: def raw_field_definition_proxy_post_save(sender, instance, raw, **kwargs): """ When proxy field definitions are loaded from a fixture they don't pass through the `field_definition_post_save` signal. Make sure they do. """
if raw: model_class = instance.content_type.model_class() opts = model_class._meta if opts.proxy and opts.concrete_model is sender: field_definition_post_save( sender=model_class, instance=instance.type_cast(), raw=raw, **kwargs )
<SYSTEM_TASK:> This signal is connected by all FieldDefinition subclasses <END_TASK> <USER_TASK:> Description: def field_definition_post_save(sender, instance, created, raw, **kwargs): """ This signal is connected by all FieldDefinition subclasses; see the comment in FieldDefinitionBase for more details """
model_class = instance.model_def.model_class().render_state() field = instance.construct_for_migrate() field.model = model_class if created: if hasattr(instance._state, '_creation_default_value'): field.default = instance._state._creation_default_value delattr(instance._state, '_creation_default_value') add_column = popattr(instance._state, '_add_column', True) if add_column: perform_ddl('add_field', model_class, field) # If the field definition is raw we must re-create the model class # since ModelDefinitionAttribute.save won't be called if raw: instance.model_def.model_class().mark_as_obsolete() else: old_field = instance._state._pre_save_field delattr(instance._state, '_pre_save_field') perform_ddl('alter_field', model_class, old_field, field, strict=True)
<SYSTEM_TASK:> A helper that correctly deepcopies model cache state <END_TASK> <USER_TASK:> Description: def _app_cache_deepcopy(obj): """ A helper that correctly deepcopies model cache state """
if isinstance(obj, defaultdict): return deepcopy(obj) elif isinstance(obj, dict): return type(obj)((_app_cache_deepcopy(key), _app_cache_deepcopy(val)) for key, val in obj.items()) elif isinstance(obj, list): return list(_app_cache_deepcopy(val) for val in obj) elif isinstance(obj, AppConfig): app_conf = Empty() app_conf.__class__ = AppConfig app_conf.__dict__ = _app_cache_deepcopy(obj.__dict__) return app_conf return obj
<SYSTEM_TASK:> A context manager that restores model cache state as it was before <END_TASK> <USER_TASK:> Description: def app_cache_restorer(): """ A context manager that restores model cache state as it was before entering the context. """
state = _app_cache_deepcopy(apps.__dict__) try: yield state finally: with apps_lock(): apps.__dict__ = state # Rebind the app registry models cache to # individual app config ones. for app_conf in apps.get_app_configs(): app_conf.models = apps.all_models[app_conf.label] apps.clear_cache()
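A usage sketch; the `yield` above implies the function is wrapped with `contextlib.contextmanager`, and `build_temporary_models` is a hypothetical helper:

with app_cache_restorer():
    # Mutate the Django app registry here (e.g. register temporary
    # model classes); the previous state is restored on exit.
    build_temporary_models()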
<SYSTEM_TASK:> Custom on_delete handler which sets _cascade_deletion_origin on the _state <END_TASK> <USER_TASK:> Description: def CASCADE_MARK_ORIGIN(collector, field, sub_objs, using): """ Custom on_delete handler which sets _cascade_deletion_origin on the _state of all related objects that will be deleted. We use this handler on ModelDefinitionAttribute.model_def, so when we delete a ModelDefinition we can skip field_definition_post_delete and base_definition_post_delete and avoid incremental column deletion before the entire table is dropped. """
CASCADE(collector, field, sub_objs, using) if sub_objs: for obj in sub_objs: obj._state._cascade_deletion_origin = field.name
<SYSTEM_TASK:> Make sure all related model classes are created and marked as dependencies <END_TASK> <USER_TASK:> Description: def mutable_model_prepared(signal, sender, definition, existing_model_class, **kwargs): """ Make sure all related model classes are created and marked as dependencies when a mutable model class is prepared """
referenced_models = set()
# Collect all model classes the obsolete model class was referring to
if existing_model_class:
    for field in existing_model_class._meta.local_fields:
        if isinstance(field, RelatedField):
            remote_field_model = get_remote_field_model(field)
            if not isinstance(remote_field_model, string_types):
                referenced_models.add(remote_field_model)
# Add sender as a dependency of all mutable models it refers to
for field in sender._meta.local_fields:
    if isinstance(field, RelatedField):
        remote_field_model = get_remote_field_model(field)
        if not isinstance(remote_field_model, string_types):
            referenced_models.add(remote_field_model)
            if (issubclass(remote_field_model, MutableModel) and
                    remote_field_model._definition != sender._definition):
                remote_field_model._dependencies.add(sender._definition)
# Mark all models referring to this one as dependencies
related_model_defs = ModelDefinition.objects.filter(
    Q(fielddefinitions__foreignkeydefinition__to=definition) |
    Q(fielddefinitions__manytomanyfielddefinition__to=definition)
).distinct()
for model_def in related_model_defs:
    if model_def != definition:
        # Generate the model class from its definition and add it as a dependency
        sender._dependencies.add(model_def.model_class()._definition)
# Clear the referenced models' opts related cache
for model_class in referenced_models:
    clear_opts_related_cache(model_class)
<SYSTEM_TASK:> Helper used to unpickle MutableModel model classes from their definition <END_TASK> <USER_TASK:> Description: def _model_class_from_pk(definition_cls, definition_pk): """ Helper used to unpickle MutableModel model classes from their definition pk. """
try: return definition_cls.objects.get(pk=definition_pk).model_class() except definition_cls.DoesNotExist: pass
<SYSTEM_TASK:> Make sure the lookup makes sense <END_TASK> <USER_TASK:> Description: def clean(self): """ Make sure the lookup makes sense """
if self.lookup == '?': # Randomly sort return else: lookups = self.lookup.split(LOOKUP_SEP) opts = self.model_def.model_class()._meta valid = True while len(lookups): lookup = lookups.pop(0) try: field = opts.get_field(lookup) except FieldDoesNotExist: valid = False else: if isinstance(field, models.ForeignKey): opts = get_remote_field_model(field)._meta elif len(lookups): # Cannot go any deeper valid = False finally: if not valid: msg = _("This field doesn't exist") raise ValidationError({'lookup': [msg]})
<SYSTEM_TASK:> Compute the time-derivative of a Lorenz system. <END_TASK> <USER_TASK:> Description: def lorentz_deriv(xyz, t0, sigma=10., beta=8./3, rho=28.0): """Compute the time-derivative of a Lorenz system."""
x, y, z = xyz  # tuple unpacking in the signature is Python 2 only syntax
return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]
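A short integration sketch with SciPy's `odeint`; the initial state and time grid are illustrative:

import numpy as np
from scipy.integrate import odeint

x0 = [1.0, 1.0, 1.0]                # initial (x, y, z)
t = np.linspace(0, 25, 10000)       # time grid
xyz = odeint(lorentz_deriv, x0, t)  # trajectory, shape (10000, 3)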
<SYSTEM_TASK:> Generate an identity key pair. Clients should only do this once, <END_TASK> <USER_TASK:> Description: def generateIdentityKeyPair(): """ Generate an identity key pair. Clients should only do this once, at install time. @return the generated IdentityKeyPair. """
keyPair = Curve.generateKeyPair()
publicKey = IdentityKey(keyPair.getPublicKey())
# The original contained an unused hard-coded serialized blob here;
# it was dead code and has been removed.
return IdentityKeyPair(publicKey, keyPair.getPrivateKey())
<SYSTEM_TASK:> Generate a list of PreKeys. Clients should do this at install time, and <END_TASK> <USER_TASK:> Description: def generatePreKeys(start, count): """ Generate a list of PreKeys. Clients should do this at install time, and subsequently any time the list of PreKeys stored on the server runs low. PreKey IDs are shorts, so they will eventually be repeated. Clients should store PreKeys in a circular buffer, so that they are repeated as infrequently as possible. @param start The starting PreKey ID, inclusive. @param count The number of PreKeys to generate. @return the list of generated PreKeyRecords. """
results = [] start -= 1 for i in range(0, count): preKeyId = ((start + i) % (Medium.MAX_VALUE - 1)) + 1 results.append(PreKeyRecord(preKeyId, Curve.generateKeyPair())) return results
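A small demonstration of the wrap-around arithmetic; the concrete value of `Medium.MAX_VALUE` is an assumption (0xFFFFFF in libsignal-style implementations):

MAX_VALUE = 0xFFFFFF  # assumed value of Medium.MAX_VALUE

def prekey_ids(start, count):
    # Same circular-ID formula as generatePreKeys above.
    start -= 1
    return [((start + i) % (MAX_VALUE - 1)) + 1 for i in range(count)]

print(prekey_ids(MAX_VALUE - 2, 4))
# [16777213, 16777214, 1, 2] -- IDs wrap back to 1 past MAX_VALUE - 1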
<SYSTEM_TASK:> Validate that to_value is a valid choice and that to_value is a valid transition from from_value. <END_TASK> <USER_TASK:> Description: def validate_valid_transition(enum, from_value, to_value): """ Validate that to_value is a valid choice and that to_value is a valid transition from from_value. """
validate_available_choice(enum, to_value) if hasattr(enum, '_transitions') and not enum.is_valid_transition(from_value, to_value): message = _(six.text_type('{enum} can not go from "{from_value}" to "{to_value}"')) raise InvalidStatusOperationError(message.format( enum=enum.__name__, from_value=enum.name(from_value), to_value=enum.name(to_value) or to_value ))
<SYSTEM_TASK:> Validate that to_value is defined as a value in enum. <END_TASK> <USER_TASK:> Description: def validate_available_choice(enum, to_value): """ Validate that to_value is defined as a value in enum. """
if to_value is None: return if type(to_value) is not int: try: to_value = int(to_value) except ValueError: message_str = "'{value}' cannot be converted to int" message = _(six.text_type(message_str)) raise InvalidStatusOperationError(message.format(value=to_value)) if to_value not in list(dict(enum.choices()).keys()): message = _(six.text_type('Select a valid choice. {value} is not one of the available choices.')) raise InvalidStatusOperationError(message.format(value=to_value))
<SYSTEM_TASK:> Open the connection with the device. <END_TASK> <USER_TASK:> Description: def open(self): """Open the connection with the device."""
try: self.device.open() except ConnectTimeoutError as cte: raise ConnectionException(cte.message) self.device.timeout = self.timeout self.device._conn._session.transport.set_keepalive(self.keepalive) if hasattr(self.device, "cu"): # make sure to remove the cu attr from previous session # ValueError: requested attribute name cu already exists del self.device.cu self.device.bind(cu=Config) if self.config_lock: self._lock()
<SYSTEM_TASK:> Compare candidate config with running. <END_TASK> <USER_TASK:> Description: def compare_config(self): """Compare candidate config with running."""
diff = self.device.cu.diff() if diff is None: return '' else: return diff.strip()
<SYSTEM_TASK:> Return the NTP peers configured on the device. <END_TASK> <USER_TASK:> Description: def get_ntp_peers(self): """Return the NTP peers configured on the device."""
ntp_table = junos_views.junos_ntp_peers_config_table(self.device) ntp_table.get() ntp_peers = ntp_table.items() if not ntp_peers: return {} return {napalm_base.helpers.ip(peer[0]): {} for peer in ntp_peers}
<SYSTEM_TASK:> Return the NTP servers configured on the device. <END_TASK> <USER_TASK:> Description: def get_ntp_servers(self): """Return the NTP servers configured on the device."""
ntp_table = junos_views.junos_ntp_servers_config_table(self.device) ntp_table.get() ntp_servers = ntp_table.items() if not ntp_servers: return {} return {napalm_base.helpers.ip(server[0]): {} for server in ntp_servers}
<SYSTEM_TASK:> Return the configuration of the RPM probes. <END_TASK> <USER_TASK:> Description: def get_probes_config(self): """Return the configuration of the RPM probes."""
probes = {} probes_table = junos_views.junos_rpm_probes_config_table(self.device) probes_table.get() probes_table_items = probes_table.items() for probe_test in probes_table_items: test_name = py23_compat.text_type(probe_test[0]) test_details = { p[0]: p[1] for p in probe_test[1] } probe_name = napalm_base.helpers.convert( py23_compat.text_type, test_details.pop('probe_name')) target = napalm_base.helpers.convert( py23_compat.text_type, test_details.pop('target', '')) test_interval = napalm_base.helpers.convert(int, test_details.pop('test_interval', '0')) probe_count = napalm_base.helpers.convert(int, test_details.pop('probe_count', '0')) probe_type = napalm_base.helpers.convert( py23_compat.text_type, test_details.pop('probe_type', '')) source = napalm_base.helpers.convert( py23_compat.text_type, test_details.pop('source_address', '')) if probe_name not in probes.keys(): probes[probe_name] = {} probes[probe_name][test_name] = { 'probe_type': probe_type, 'target': target, 'source': source, 'probe_count': probe_count, 'test_interval': test_interval } return probes
<SYSTEM_TASK:> Remove all auth tokens owned by request.user. <END_TASK> <USER_TASK:> Description: def get(self, request, format=None): """ Remove all auth tokens owned by request.user. """
tokens = Token.objects.filter(user=request.user) for token in tokens: token.delete() content = {'success': _('User logged out.')} return Response(content, status=status.HTTP_200_OK)
<SYSTEM_TASK:> Set attributes to dictionary values so they can be accessed via dot notation. <END_TASK> <USER_TASK:> Description: def _set_attrs_to_values(self, response=None): """ Set attributes to dictionary values so they can be accessed via dot notation. """
# Avoid a mutable default argument; fall back to an empty dict.
if response is None:
    response = {}
for key, value in response.items():
    setattr(self, key, value)
<SYSTEM_TASK:> Add flask route to autodoc for automatic documentation <END_TASK> <USER_TASK:> Description: def doc(self, groups=None, set_location=True, **properties): """Add flask route to autodoc for automatic documentation Any route decorated with this method will be added to the list of routes to be documented by the generate() or html() methods. By default, the route is added to the 'all' group. By specifying the groups argument, the route can be added to one or multiple other groups as well, besides the 'all' group. If set_location is True, the location of the function will be stored. NOTE: this assumes that the decorator is placed just before the function (in the normal way). Custom parameters may also be passed in beyond groups; if they are named something not already in the dict described in the docstring for the generate() function, they will be added to the route's properties, which can be accessed from the template. If a parameter is passed in with a name that is already in the dict, but not of a reserved name, the passed parameter overrides that dict value. """
def decorator(f): # Get previous group list (if any) if f in self.func_groups: groupset = self.func_groups[f] else: groupset = set() # Set group[s] if type(groups) is list: groupset.update(groups) elif type(groups) is str: groupset.add(groups) groupset.add('all') self.func_groups[f] = groupset self.func_props[f] = properties # Set location if set_location: caller_frame = inspect.stack()[1] self.func_locations[f] = { 'filename': caller_frame[1], 'line': caller_frame[2], } return f return decorator
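A usage sketch in the Flask-Autodoc style; the import path, route, group names, and the custom `summary` property are illustrative assumptions:

from flask import Flask
from flask_autodoc import Autodoc  # assumed import path

app = Flask(__name__)
auto = Autodoc(app)

@app.route('/users')
@auto.doc(groups=['public', 'users'], summary='List all users')
def list_users():
    return 'users'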
<SYSTEM_TASK:> Return true if OK to continue with close or quit or whatever <END_TASK> <USER_TASK:> Description: def unsaved_files_dialog( self, all_files=False, with_cancel=True, with_discard=True): """Return true if OK to continue with close or quit or whatever"""
for image in self.images: if image.metadata.changed() and (all_files or image.selected): break else: return True dialog = QtWidgets.QMessageBox() dialog.setWindowTitle(self.tr('Photini: unsaved data')) dialog.setText(self.tr('<h3>Some images have unsaved metadata.</h3>')) dialog.setInformativeText(self.tr('Do you want to save your changes?')) dialog.setIcon(QtWidgets.QMessageBox.Warning) buttons = QtWidgets.QMessageBox.Save if with_cancel: buttons |= QtWidgets.QMessageBox.Cancel if with_discard: buttons |= QtWidgets.QMessageBox.Discard dialog.setStandardButtons(buttons) dialog.setDefaultButton(QtWidgets.QMessageBox.Save) result = dialog.exec_() if result == QtWidgets.QMessageBox.Save: self._save_files() return True return result == QtWidgets.QMessageBox.Discard
<SYSTEM_TASK:> Finds another node by XPath originating at the current node. <END_TASK> <USER_TASK:> Description: def xpath(self, xpath): """ Finds another node by XPath originating at the current node. """
return [self.get_node_factory().create(node_id) for node_id in self._get_xpath_ids(xpath).split(",") if node_id]
<SYSTEM_TASK:> Finds another node by a CSS selector relative to the current node. <END_TASK> <USER_TASK:> Description: def css(self, css): """ Finds another node by a CSS selector relative to the current node. """
return [self.get_node_factory().create(node_id) for node_id in self._get_css_ids(css).split(",") if node_id]
<SYSTEM_TASK:> Returns the value of a boolean HTML attribute like `checked` or `disabled` <END_TASK> <USER_TASK:> Description: def get_bool_attr(self, name): """ Returns the value of a boolean HTML attribute like `checked` or `disabled` """
val = self.get_attr(name) return val is not None and val.lower() in ("true", name)
<SYSTEM_TASK:> Sets the value of an attribute. <END_TASK> <USER_TASK:> Description: def set_attr(self, name, value): """ Sets the value of an attribute. """
self.exec_script("node.setAttribute(%s, %s)" % (repr(name), repr(value)))
<SYSTEM_TASK:> Sets a HTTP header for future requests. <END_TASK> <USER_TASK:> Description: def set_header(self, key, value): """ Sets a HTTP header for future requests. """
self.conn.issue_command("Header", _normalize_header(key), value)
<SYSTEM_TASK:> Returns a list of the last HTTP response headers. <END_TASK> <USER_TASK:> Description: def headers(self): """ Returns a list of the last HTTP response headers. Header keys are normalized to capitalized form, as in `User-Agent`. """
headers = self.conn.issue_command("Headers") res = [] for header in headers.split("\r"): key, value = header.split(": ", 1) for line in value.split("\n"): res.append((_normalize_header(key), line)) return res
<SYSTEM_TASK:> Evaluates a piece of Javascript in the context of the current page and <END_TASK> <USER_TASK:> Description: def eval_script(self, expr): """ Evaluates a piece of Javascript in the context of the current page and returns its value. """
ret = self.conn.issue_command("Evaluate", expr) return json.loads("[%s]" % ret)[0]
<SYSTEM_TASK:> Returns a list of all cookies in cookie string format. <END_TASK> <USER_TASK:> Description: def cookies(self): """ Returns a list of all cookies in cookie string format. """
return [line.strip() for line in self.conn.issue_command("GetCookies").split("\n") if line.strip()]
<SYSTEM_TASK:> Sets custom HTML in our Webkit session and allows specifying a fake URL. <END_TASK> <USER_TASK:> Description: def set_html(self, html, url=None): """ Sets custom HTML in our Webkit session and allows specifying a fake URL. Scripts and CSS are dynamically fetched as if the HTML had been loaded from the given URL. """
if url: self.conn.issue_command('SetHtml', html, url) else: self.conn.issue_command('SetHtml', html)
<SYSTEM_TASK:> Sets a custom HTTP proxy to use for future requests. <END_TASK> <USER_TASK:> Description: def set_proxy(self, host = "localhost", port = 0, user = "", password = ""): """ Sets a custom HTTP proxy to use for future requests. """
self.conn.issue_command("SetProxy", host, port, user, password)
<SYSTEM_TASK:> Returns a new socket connection to this server. <END_TASK> <USER_TASK:> Description: def connect(self): """ Returns a new socket connection to this server. """
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect(("127.0.0.1", self._port)) return sock
<SYSTEM_TASK:> Consume one line from the stream. <END_TASK> <USER_TASK:> Description: def read_line(self): """ Consume one line from the stream. """
while True: newline_idx = self.buf.find(b"\n") if newline_idx >= 0: res = self.buf[:newline_idx] self.buf = self.buf[newline_idx + 1:] return res chunk = self.f.recv(4096) if not chunk: raise EndOfStreamError() self.buf += chunk
<SYSTEM_TASK:> Consume `n` characters from the stream. <END_TASK> <USER_TASK:> Description: def read(self, n): """ Consume `n` characters from the stream. """
while len(self.buf) < n: chunk = self.f.recv(4096) if not chunk: raise EndOfStreamError() self.buf += chunk res, self.buf = self.buf[:n], self.buf[n:] return res
<SYSTEM_TASK:> Reads a complete response packet from the server <END_TASK> <USER_TASK:> Description: def _read_response(self): """ Reads a complete response packet from the server """
result = self.buf.read_line().decode("utf-8") if not result: raise NoResponseError("No response received from server.") msg = self._read_message() if result != "ok": raise InvalidResponseError(msg) return msg
<SYSTEM_TASK:> Reads a single size-annotated message from the server <END_TASK> <USER_TASK:> Description: def _read_message(self): """ Reads a single size-annotated message from the server """
size = int(self.buf.read_line().decode("utf-8")) return self.buf.read(size).decode("utf-8")
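For reference, a successful response on the wire (as inferred from the two readers above) is a status line followed by a size-annotated message; the payload here is illustrative:

# Raw bytes the server sends for a command whose result is "hello":
payload = "hello"
wire = "ok\n{}\n{}".format(len(payload), payload).encode("utf-8")
print(wire)  # b'ok\n5\nhello'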
<SYSTEM_TASK:> Enable or disable interrupts by setting enabled to True or False. <END_TASK> <USER_TASK:> Description: def set_interrupt(self, enabled): """Enable or disable interrupts by setting enabled to True or False."""
enable_reg = self._readU8(TCS34725_ENABLE) if enabled: enable_reg |= TCS34725_ENABLE_AIEN else: enable_reg &= ~TCS34725_ENABLE_AIEN self._write8(TCS34725_ENABLE, enable_reg) time.sleep(1)
<SYSTEM_TASK:> Converts a dictionary to a string of ``key=\"value\"`` pairs. <END_TASK> <USER_TASK:> Description: def _dict_to_html_attributes(d): """ Converts a dictionary to a string of ``key=\"value\"`` pairs. If ``None`` is provided as the dictionary an empty string is returned, i.e. no html attributes are generated. Parameters ---------- d : dict Dictionary to convert to html attributes. Returns ------- str String of HTML attributes in the form ``key_i=\"value_i\" ... key_N=\"value_N\"``, where ``N`` is the total number of ``(key, value)`` pairs. """
if d is None: return "" return "".join(" {}=\"{}\"".format(key, value) for key, value in iter(d.items()))
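For example (keys and values are illustrative):

print(_dict_to_html_attributes({"class": "data", "id": "main"}))
# ' class="data" id="main"'  (one leading space per key/value pair)
print(_dict_to_html_attributes(None))
# ''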
<SYSTEM_TASK:> Detects if all entries in an list of ``dict``'s have identical keys. <END_TASK> <USER_TASK:> Description: def _list_of_dicts_to_column_headers(list_of_dicts): """ Detects if all entries in an list of ``dict``'s have identical keys. Returns the keys if all keys are the same and ``None`` otherwise. Parameters ---------- list_of_dicts : list List of dictionaries to test for identical keys. Returns ------- list or None List of column headers if all dictionary posessed the same keys. Returns ``None`` otherwise. """
if len(list_of_dicts) < 2 or not all(isinstance(item, dict) for item in list_of_dicts): return None column_headers = list_of_dicts[0].keys() for d in list_of_dicts[1:]: if len(d.keys()) != len(column_headers) or not all(header in d for header in column_headers): return None return column_headers
<SYSTEM_TASK:> Recursively generates HTML for the current entry. <END_TASK> <USER_TASK:> Description: def _markup(self, entry): """ Recursively generates HTML for the current entry. Parameters ---------- entry : object Object to convert to HTML. Maybe be a single entity or contain multiple and/or nested objects. Returns ------- str String of HTML formatted json. """
if entry is None: return "" if isinstance(entry, list): list_markup = "<ul>" for item in entry: list_markup += "<li>{:s}</li>".format(self._markup(item)) list_markup += "</ul>" return list_markup if isinstance(entry, dict): return self.convert(entry) # default to stringifying entry return str(entry)
<SYSTEM_TASK:> If all keys in a list of dicts are identical, values from each ``dict`` <END_TASK> <USER_TASK:> Description: def _maybe_club(self, list_of_dicts): """ If all keys in a list of dicts are identical, values from each ``dict`` are clubbed, i.e. inserted under a common column heading. If the keys are not identical ``None`` is returned, and the list should be converted to HTML per the normal ``convert`` function. Parameters ---------- list_of_dicts : list List to attempt to club. Returns ------- str or None String of HTML if list was successfully clubbed. Returns ``None`` otherwise. Example ------- Given the following json object:: { "sampleData": [ {"a":1, "b":2, "c":3}, {"a":5, "b":6, "c":7}] } Calling ``_maybe_club`` would result in the following HTML table: _____________________________ | | | | | | | a | c | b | | sampleData |---|---|---| | | 1 | 3 | 2 | | | 5 | 7 | 6 | ----------------------------- Adapted from a contribution from @muellermichel to ``json2html``. """
column_headers = JsonConverter._list_of_dicts_to_column_headers(list_of_dicts) if column_headers is None: # common headers not found, return normal markup html_output = self._markup(list_of_dicts) else: html_output = self._table_opening_tag html_output += self._markup_header_row(column_headers) for list_entry in list_of_dicts: html_output += "<tr><td>" html_output += "</td><td>".join(self._markup(list_entry[column_header]) for column_header in column_headers) html_output += "</td></tr>" html_output += "</table>" return self._markup_table_cell(html_output)
<SYSTEM_TASK:> This structure is what Django wants when errors occur in templates. <END_TASK> <USER_TASK:> Description: def get_template_debug(template_name, error): ''' This structure is what Django wants when errors occur in templates. It gives the user a nice stack trace in the error page during debug. ''' # This is taken from mako.exceptions.html_error_template(), which has an issue # in Py3 where files get loaded as bytes but `lines = src.split('\n')` below # splits with a string. Not sure if this is a bug or if I'm missing something, # but doing a custom debugging template allows a workaround as well as a custom # DMP look. # I used to have a file in the templates directory for this, but too many users # reported TemplateNotFound errors. This function is a bit of a hack, but it only # happens during development (and mako.exceptions does this same thing). # /justification stacktrace_template = MakoTemplate(r""" <%! from mako.exceptions import syntax_highlight, pygments_html_formatter %> <style> .stacktrace { margin:5px 5px 5px 5px; } .highlight { padding:0px 10px 0px 10px; background-color:#9F9FDF; } .nonhighlight { padding:0px; background-color:#DFDFDF; } .sample { padding:10px; margin:10px 10px 10px 10px; font-family:monospace; } .sampleline { padding:0px 10px 0px 10px; } .sourceline { margin:5px 5px 10px 5px; font-family:monospace;} .location { font-size:80%; } .highlight { white-space:pre; } .sampleline { white-space:pre; } % if pygments_html_formatter: ${pygments_html_formatter.get_style_defs() | n} .linenos { min-width: 2.5em; text-align: right; } pre { margin: 0; } .syntax-highlighted { padding: 0 10px; } .syntax-highlightedtable { border-spacing: 1px; } .nonhighlight { border-top: 1px solid #DFDFDF; border-bottom: 1px solid #DFDFDF; } .stacktrace .nonhighlight { margin: 5px 15px 10px; } .sourceline { margin: 0 0; font-family:monospace; } .code { background-color: #F8F8F8; width: 100%; } .error .code { background-color: #FFBDBD; } .error .syntax-highlighted { background-color: #FFBDBD; } % endif ## adjustments to Django css table.source { background-color: #fdfdfd; } table.source > tbody > tr > th { width: auto; } table.source > tbody > tr > td { font-family: inherit; white-space: normal; padding: 15px; } #template { background-color: #b3daff; } </style> <% src = tback.source line = tback.lineno if isinstance(src, bytes): src = src.decode() if src: lines = src.split('\n') else: lines = None %> <h3>${tback.errorname}: ${tback.message}</h3> % if lines: <div class="sample"> <div class="nonhighlight"> % for index in range(max(0, line-4),min(len(lines), line+5)): <% if pygments_html_formatter: pygments_html_formatter.linenostart = index + 1 %> % if index + 1 == line: <% if pygments_html_formatter: old_cssclass = pygments_html_formatter.cssclass pygments_html_formatter.cssclass = 'error ' + old_cssclass %> ${lines[index] | n,syntax_highlight(language='mako')} <% if pygments_html_formatter: pygments_html_formatter.cssclass = old_cssclass %> % else: ${lines[index] | n,syntax_highlight(language='mako')} % endif % endfor </div> </div> % endif <div class="stacktrace"> % for (filename, lineno, function, line) in tback.reverse_traceback: <div class="location">${filename}, line ${lineno}:</div> <div class="nonhighlight"> <% if pygments_html_formatter: pygments_html_formatter.linenostart = lineno %> <div class="sourceline">${line | n,syntax_highlight(filename)}</div> </div> % endfor </div> """
) tback = RichTraceback(error, error.__traceback__) lines = stacktrace_template.render_unicode(tback=tback) return { 'message': '', 'source_lines': [ ( '', mark_safe(lines) ), ], 'before': '', 'during': '', 'after': '', 'top': 0, 'bottom': 0, 'total': 0, 'line': tback.lineno or 0, 'name': template_name, 'start': 0, 'end': 0, }
<SYSTEM_TASK:> Run all the stages in the protocol <END_TASK> <USER_TASK:> Description: def protocol(handler, cfg): """ Run all the stages in the protocol Parameters ---------- handler : SystemHandler Container of initial conditions for the simulation cfg : dict Imported YAML file. """
# Stages if 'stages' not in cfg: raise ValueError('Protocol must include stages of simulation') pos, vel, box = handler.positions, handler.velocities, handler.box stages = cfg.pop('stages') for stage_options in stages: options = DEFAULT_OPTIONS.copy() options.update(cfg) stage_system_options = prepare_system_options(stage_options) options.update(stage_options) options['system_options'].update(stage_system_options) stage = Stage(handler, positions=pos, velocities=vel, box=box, total_stages=len(stages), **options) pos, vel, box = stage.run() del stage
<SYSTEM_TASK:> Minimize energy of the system until meeting `tolerance` or <END_TASK> <USER_TASK:> Description: def minimize(self, tolerance=None, max_iterations=None): """ Minimize energy of the system until meeting `tolerance` or performing `max_iterations`. """
if tolerance is None: tolerance = self.minimization_tolerance if max_iterations is None: max_iterations = self.minimization_max_iterations self.simulation.minimizeEnergy(tolerance * u.kilojoules_per_mole, max_iterations)
<SYSTEM_TASK:> Force that restrains atoms to fix their positions, while allowing <END_TASK> <USER_TASK:> Description: def restraint_force(self, indices=None, strength=5.0): """ Force that restrains atoms to fix their positions, while allowing tiny movement to resolve severe clashes and so on. Returns ------- force : simtk.openmm.CustomExternalForce A custom force to restrain the selected atoms """
if self.system.usesPeriodicBoundaryConditions(): expression = 'k*periodicdistance(x, y, z, x0, y0, z0)^2' else: expression = 'k*((x-x0)^2 + (y-y0)^2 + (z-z0)^2)' force = mm.CustomExternalForce(expression) force.addGlobalParameter('k', strength*u.kilocalories_per_mole/u.angstroms**2) force.addPerParticleParameter('x0') force.addPerParticleParameter('y0') force.addPerParticleParameter('z0') positions = self.positions if self.positions is not None else self.handler.positions if indices is None: indices = range(self.handler.topology.getNumAtoms()) for i, index in enumerate(indices): force.addParticle(i, positions[index].value_in_unit(u.nanometers)) return force
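A sketch of wiring the restraint into a run, assuming `stage` is an instance of this class and `protein_indices` is a hypothetical list of atom indices:

force = stage.restraint_force(indices=protein_indices, strength=5.0)
stage.system.addForce(force)  # attach before the OpenMM Context is created
stage.minimize()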
<SYSTEM_TASK:> Returns a list of atom indices corresponding to a MDTraj DSL <END_TASK> <USER_TASK:> Description: def subset(self, selector): """ Returns a list of atom indices corresponding to a MDTraj DSL query. Also will accept list of numbers, which will be coerced to int and returned. """
if isinstance(selector, (list, tuple)): return map(int, selector) selector = SELECTORS.get(selector, selector) mdtop = MDTrajTopology.from_openmm(self.handler.topology) return mdtop.select(selector)
<SYSTEM_TASK:> Handle Ctrl+C and accidental exceptions and attempt to save <END_TASK> <USER_TASK:> Description: def handle_exceptions(self, verbose=True): """ Handle Ctrl+C and accidental exceptions and attempt to save the current state of the simulation """
try: yield except (KeyboardInterrupt, Exception) as ex: if not self.attempt_rescue: raise ex if isinstance(ex, KeyboardInterrupt): reraise = False answer = timed_input('\n\nDo you want to save current state? (y/N): ') if answer and answer.lower() not in ('y', 'yes'): if verbose: sys.exit('Ok, bye!') else: reraise = True logger.error('\n\nAn error occurred: %s', ex) if verbose: logger.info('Saving state...') try: self.backup_simulation() except Exception: if verbose: logger.error('FAILED :(') else: if verbose: logger.info('SUCCESS!') finally: if reraise: raise ex sys.exit()
<SYSTEM_TASK:> Creates an emergency report run, .state included <END_TASK> <USER_TASK:> Description: def backup_simulation(self): """ Creates an emergency report run, .state included """
path = self.new_filename(suffix='_emergency.state') self.simulation.saveState(path) uses_pbc = self.system.usesPeriodicBoundaryConditions() state_kw = dict(getPositions=True, getVelocities=True, getForces=True, enforcePeriodicBox=uses_pbc, getParameters=True, getEnergy=True) state = self.simulation.context.getState(**state_kw) for reporter in self.simulation.reporters: if not isinstance(reporter, app.StateDataReporter): reporter.report(self.simulation, state)
<SYSTEM_TASK:> Get, parse and prepare input file. <END_TASK> <USER_TASK:> Description: def prepare_input(argv=None): """ Get, parse and prepare input file. """
p = ArgumentParser(description='InsiliChem Ommprotocol: ' 'easy to deploy MD protocols for OpenMM') p.add_argument('input', metavar='INPUT FILE', type=extant_file, help='YAML input file') p.add_argument('--version', action='version', version='%(prog)s v{}'.format(__version__)) p.add_argument('-c', '--check', action='store_true', help='Validate input file only') args = p.parse_args(argv if argv else sys.argv[1:]) jinja_env = jinja2.Environment(trim_blocks=True, lstrip_blocks=True) # Load config file with open(args.input) as f: rendered = jinja_env.from_string(f.read()).render() cfg = yaml.load(rendered, Loader=YamlLoader) # Paths and dirs from .md import SYSTEM_OPTIONS cfg['_path'] = os.path.abspath(args.input) cfg['system_options'] = prepare_system_options(cfg, defaults=SYSTEM_OPTIONS) cfg['outputpath'] = sanitize_path_for_file(cfg.get('outputpath', '.'), args.input) if not args.check: with ignored_exceptions(OSError): os.makedirs(cfg['outputpath']) handler = prepare_handler(cfg) return handler, cfg, args
<SYSTEM_TASK:> Load all files into a single object. <END_TASK> <USER_TASK:> Description: def prepare_handler(cfg): """ Load all files into a single object. """
positions, velocities, box = None, None, None _path = cfg.get('_path', './') forcefield = cfg.pop('forcefield', None) topology_args = sanitize_args_for_file(cfg.pop('topology'), _path) if 'checkpoint' in cfg: restart_args = sanitize_args_for_file(cfg.pop('checkpoint'), _path) restart = Restart.load(*restart_args) positions = restart.positions velocities = restart.velocities box = restart.box if 'positions' in cfg: positions_args = sanitize_args_for_file(cfg.pop('positions'), _path) positions = Positions.load(*positions_args) box = BoxVectors.load(*positions_args) if 'velocities' in cfg: velocities_args = sanitize_args_for_file(cfg.pop('velocities'), _path) velocities = Velocities.load(*velocities_args) if 'box' in cfg: box_args = sanitize_args_for_file(cfg.pop('box'), _path) box = BoxVectors.load(*box_args) options = {} for key in 'positions velocities box forcefield'.split(): value = locals()[key] if value is not None: options[key] = value return SystemHandler.load(*topology_args, **options)
<SYSTEM_TASK:> Given a list of filenames, check which ones are `frcmods`. If so, <END_TASK> <USER_TASK:> Description: def process_forcefield(*forcefields): """ Given a list of filenames, check which ones are `frcmods`. If so, convert them to ffxml. Else, just return them. """
for forcefield in forcefields: if forcefield.endswith('.frcmod'): gaffmol2 = os.path.splitext(forcefield)[0] + '.gaff.mol2' yield create_ffxml_file([gaffmol2], [forcefield]) else: yield forcefield
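A usage sketch; the file names are hypothetical, and each `.frcmod` must have a sibling `<name>.gaff.mol2` file, since the code above derives that path:

ffxmls = list(process_forcefield('amber99sbildn.xml', 'ligand.frcmod'))
# -> ['amber99sbildn.xml', <path to the ffxml generated from ligand.frcmod>]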
<SYSTEM_TASK:> Given an OpenMM xml file containing the state of the simulation, <END_TASK> <USER_TASK:> Description: def statexml2pdb(topology, state, output=None): """ Given an OpenMM xml file containing the state of the simulation, generate a PDB snapshot for easy visualization. """
state = Restart.from_xml(state) system = SystemHandler.load(topology, positions=state.positions) if output is None: output = topology + '.pdb' system.write_pdb(output)
<SYSTEM_TASK:> Extract a single frame structure from a trajectory. <END_TASK> <USER_TASK:> Description: def export_frame_coordinates(topology, trajectory, nframe, output=None): """ Extract a single frame structure from a trajectory. """
if output is None: basename, ext = os.path.splitext(trajectory) output = '{}.frame{}.inpcrd'.format(basename, nframe) # ParmEd sometimes struggles with certain PRMTOP files if os.path.splitext(topology)[1] in ('.top', '.prmtop'): top = AmberPrmtopFile(topology) mdtop = mdtraj.Topology.from_openmm(top.topology) traj = mdtraj.load_frame(trajectory, int(nframe), top=mdtop) structure = parmed.openmm.load_topology(top.topology, system=top.createSystem()) structure.box_vectors = top.topology.getPeriodicBoxVectors() else: # standard protocol (the topology is loaded twice, though) traj = mdtraj.load_frame(trajectory, int(nframe), top=topology) structure = parmed.load_file(topology) structure.positions = traj.openmm_positions(0) if traj.unitcell_vectors is not None: # if frame provides box vectors, use those structure.box_vectors = traj.openmm_boxes(0) structure.save(output, overwrite=True)
<SYSTEM_TASK:> Include file referenced at node. <END_TASK> <USER_TASK:> Description: def construct_include(self, node): """Include file referenced at node."""
filename = os.path.join(self._root, self.construct_scalar(node)) filename = os.path.abspath(filename) extension = os.path.splitext(filename)[1].lstrip('.') with open(filename, 'r') as f: if extension in ('yaml', 'yml'): return yaml.load(f, Loader=self) else: return ''.join(f.readlines())
<SYSTEM_TASK:> Loads topology, positions and, potentially, velocities and box vectors <END_TASK> <USER_TASK:> Description: def from_pdb(cls, path, forcefield=None, loader=PDBFile, strict=True, **kwargs): """ Loads topology, positions and, potentially, velocities and box vectors from a PDB or PDBx file Parameters ---------- path : str Path to PDB/PDBx file forcefield : list of str Paths to FFXML and/or FRCMOD forcefields. REQUIRED. Returns ------- pdb : SystemHandler SystemHandler with topology, positions, and, potentially, velocities and box vectors. Forcefields are embedded in the `master` attribute. """
pdb = loader(path) box = kwargs.pop('box', pdb.topology.getPeriodicBoxVectors()) positions = kwargs.pop('positions', pdb.positions) velocities = kwargs.pop('velocities', getattr(pdb, 'velocities', None)) if strict and not forcefield: from .md import FORCEFIELDS as forcefield logger.info('! Forcefields for PDB not specified. Using default: %s', ', '.join(forcefield)) pdb.forcefield = ForceField(*list(process_forcefield(*forcefield))) return cls(master=pdb.forcefield, topology=pdb.topology, positions=positions, velocities=velocities, box=box, path=path, **kwargs)
<SYSTEM_TASK:> Loads Amber Parm7 parameters and topology file <END_TASK> <USER_TASK:> Description: def from_amber(cls, path, positions=None, strict=True, **kwargs): """ Loads Amber Parm7 parameters and topology file Parameters ---------- path : str Path to *.prmtop or *.top file positions : simtk.unit.Quantity Atomic positions Returns ------- prmtop : SystemHandler SystemHandler with topology """
if strict and positions is None: raise ValueError('Amber TOP/PRMTOP files require initial positions.') prmtop = AmberPrmtopFile(path) box = kwargs.pop('box', prmtop.topology.getPeriodicBoxVectors()) return cls(master=prmtop, topology=prmtop.topology, positions=positions, box=box, path=path, **kwargs)
<SYSTEM_TASK:> Loads PSF Charmm structure from `path`. Requires `forcefield`. <END_TASK> <USER_TASK:> Description: def from_charmm(cls, path, positions=None, forcefield=None, strict=True, **kwargs): """ Loads PSF Charmm structure from `path`. Requires `forcefield`. Parameters ---------- path : str Path to PSF file forcefield : list of str Paths to Charmm parameters files, such as *.par or *.str. REQUIRED Returns ------- psf : SystemHandler SystemHandler with topology. Charmm parameters are embedded in the `master` attribute. """
psf = CharmmPsfFile(path) if strict and forcefield is None: raise ValueError('PSF files require key `forcefield`.') if strict and positions is None: raise ValueError('PSF files require key `positions`.') psf.parmset = CharmmParameterSet(*forcefield) psf.loadParameters(psf.parmset) return cls(master=psf, topology=psf.topology, positions=positions, path=path, **kwargs)
<SYSTEM_TASK:> Loads a topology from a Desmond DMS file located at `path`. <END_TASK> <USER_TASK:> Description: def from_desmond(cls, path, **kwargs): """ Loads a topology from a Desmond DMS file located at `path`. Arguments --------- path : str Path to a Desmond DMS file """
dms = DesmondDMSFile(path) pos = kwargs.pop('positions', dms.getPositions()) return cls(master=dms, topology=dms.getTopology(), positions=pos, path=path, **kwargs)
<SYSTEM_TASK:> Loads a topology from a Gromacs TOP file located at `path`. <END_TASK> <USER_TASK:> Description: def from_gromacs(cls, path, positions=None, forcefield=None, strict=True, **kwargs): """ Loads a topology from a Gromacs TOP file located at `path`. Additional root directory for parameters can be specified with `forcefield`. Arguments --------- path : str Path to a Gromacs TOP file positions : simtk.unit.Quantity Atomic positions forcefield : str, optional Root directory for parameter files """
if strict and positions is None: raise ValueError('Gromacs TOP files require initial positions.') box = kwargs.pop('box', None) top = GromacsTopFile(path, includeDir=forcefield, periodicBoxVectors=box) return cls(master=top, topology=top.topology, positions=positions, box=box, path=path, **kwargs)
<SYSTEM_TASK:> Try to load a file automatically with ParmEd. Not guaranteed to work, but <END_TASK> <USER_TASK:> Description: def from_parmed(cls, path, *args, **kwargs): """ Try to load a file automatically with ParmEd. Not guaranteed to work, but might be useful if it succeeds. Arguments --------- path : str Path to file that ParmEd can load """
st = parmed.load_file(path, structure=True, *args, **kwargs) box = kwargs.pop('box', getattr(st, 'box', None)) velocities = kwargs.pop('velocities', getattr(st, 'velocities', None)) positions = kwargs.pop('positions', getattr(st, 'positions', None)) return cls(master=st, topology=st.topology, positions=positions, box=box, velocities=velocities, path=path, **kwargs)
<SYSTEM_TASK:> Loads pickled topology. Careful with Python versions though! <END_TASK> <USER_TASK:> Description: def _pickle_load(path): """ Loads pickled topology. Careful with Python versions though! """
_, ext = os.path.splitext(path)
if ext not in ('.pickle', '.pickle2', '.pickle3'):
    raise ValueError('File {} is not compatible with this version'.format(path))
with open(path, 'rb') as f:
    # pickle.load() takes no `protocol` argument (that keyword belongs to
    # pickle.dump); the protocol is detected from the stream itself.
    # Protocol-3 pickles simply cannot be read under Python 2 and will
    # raise on load.
    topology = pickle.load(f)
return topology
<SYSTEM_TASK:> Create an OpenMM system for every supported topology file with given system options <END_TASK> <USER_TASK:> Description: def create_system(self, **system_options): """ Create an OpenMM system for every supported topology file with given system options """
if self.master is None: raise ValueError('Handler {} is not able to create systems.'.format(self)) if isinstance(self.master, ForceField): system = self.master.createSystem(self.topology, **system_options) elif isinstance(self.master, (AmberPrmtopFile, GromacsTopFile, DesmondDMSFile)): system = self.master.createSystem(**system_options) elif isinstance(self.master, CharmmPsfFile): if not hasattr(self.master, 'parmset'): raise ValueError('PSF topology files require Charmm parameters.') system = self.master.createSystem(self.master.parmset, **system_options) else: raise NotImplementedError('Handler {} is not able to create systems.'.format(self)) if self.has_box: system.setDefaultPeriodicBoxVectors(*self.box) return system
<SYSTEM_TASK:> Outputs a PDB file with the current contents of the system <END_TASK> <USER_TASK:> Description: def write_pdb(self, path): """ Outputs a PDB file with the current contents of the system """
if self.master is None and self.positions is None: raise ValueError('Topology and positions are needed to write output files.') with open(path, 'w') as f: PDBFile.writeFile(self.topology, self.positions, f)
<SYSTEM_TASK:> Returns u.Quantity with box vectors from XSC file <END_TASK> <USER_TASK:> Description: def from_xsc(cls, path): """ Returns u.Quantity with box vectors from XSC file """
def parse(path): """ Open and parses an XSC file into its fields Parameters ---------- path : str Path to XSC file Returns ------- namedxsc : namedtuple A namedtuple with XSC fields as names """ with open(path) as f: lines = f.readlines() NamedXsc = namedtuple('NamedXsc', lines[1].split()[1:]) return NamedXsc(*map(float, lines[2].split())) xsc = parse(path) return u.Quantity([[xsc.a_x, xsc.a_y, xsc.a_z], [xsc.b_x, xsc.b_y, xsc.b_z], [xsc.c_x, xsc.c_y, xsc.c_z]], unit=u.angstroms)
<SYSTEM_TASK:> Get box vectors from comma-separated values in file `path`. <END_TASK> <USER_TASK:> Description: def from_csv(cls, path): """ Get box vectors from comma-separated values in file `path`. The csv file must contain only one line, which in turn can contain three values (orthogonal vectors) or nine values (triclinic box). The values should be in nanometers. Parameters ---------- path : str Path to CSV file Returns ------- vectors : simtk.unit.Quantity of shape (3, 3), in nanometers """
with open(path) as f:
    # map() returns a lazy iterator on Python 3, so materialize the values
    # as a list before calling len() on them.
    fields = [float(field) for field in next(f).split(',')]
if len(fields) == 3:
    return u.Quantity([[fields[0], 0, 0],
                       [0, fields[1], 0],
                       [0, 0, fields[2]]], unit=u.nanometers)
elif len(fields) == 9:
    return u.Quantity([fields[0:3],
                       fields[3:6],
                       fields[6:9]], unit=u.nanometers)
else:
    raise ValueError('This type of CSV is not supported. Please '
                     'provide a comma-separated list of three or nine '
                     'floats in a single-line file.')
<SYSTEM_TASK:> Get information about the next report this object will generate. <END_TASK> <USER_TASK:> Description: def describeNextReport(self, simulation): """Get information about the next report this object will generate. Parameters ---------- simulation : Simulation The Simulation to generate a report for Returns ------- tuple A five element tuple. The first element is the number of steps until the next report. The remaining elements specify whether that report will require positions, velocities, forces, and energies respectively. """
steps = self.interval - simulation.currentStep % self.interval return steps, False, False, False, False
<SYSTEM_TASK:> If path exists, modify to add a counter in the filename. Useful <END_TASK> <USER_TASK:> Description: def assert_not_exists(path, sep='.'): """ If path exists, modify to add a counter in the filename. Useful for preventing accidental overrides. For example, if `file.txt` exists, check if `file.1.txt` also exists. Repeat until we find a non-existing version, such as `file.12.txt`. Parameters ---------- path : str Path to be checked Returns ------- newpath : str A modified version of path with a counter right before the extension. """
name, ext = os.path.splitext(path) i = 1 while os.path.exists(path): path = '{}{}{}{}'.format(name, sep, i, ext) i += 1 return path
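For example, with `results.txt` and `results.1.txt` already on disk:

assert_not_exists('results.txt')   # -> 'results.2.txt'
assert_not_exists('missing.txt')   # -> 'missing.txt' (returned unchanged)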
<SYSTEM_TASK:> Make sure object `obj` is of type `types`. Else, raise TypeError. <END_TASK> <USER_TASK:> Description: def assertinstance(obj, types): """ Make sure object `obj` is of type `types`. Else, raise TypeError. """
if isinstance(obj, types): return obj raise TypeError('{} must be instance of {}'.format(obj, types))
<SYSTEM_TASK:> Check if file exists with argparse <END_TASK> <USER_TASK:> Description: def extant_file(path): """ Check if file exists with argparse """
if not os.path.exists(path): raise argparse.ArgumentTypeError("{} does not exist".format(path)) return path
<SYSTEM_TASK:> Sort files taking into account potentially absent suffixes like <END_TASK> <USER_TASK:> Description: def sort_key_for_numeric_suffixes(path, sep='.', suffix_index=-2): """ Sort files taking into account potentially absent suffixes like somefile.dcd somefile.1000.dcd somefile.2000.dcd To be used with sorted(..., key=callable). """
chunks = path.split(sep) # Remove suffix from path and convert to int if chunks[suffix_index].isdigit(): return sep.join(chunks[:suffix_index] + chunks[suffix_index+1:]), int(chunks[suffix_index]) return path, 0
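Used as a `sorted` key, following the docstring's file naming:

files = ['somefile.2000.dcd', 'somefile.dcd', 'somefile.1000.dcd']
print(sorted(files, key=sort_key_for_numeric_suffixes))
# ['somefile.dcd', 'somefile.1000.dcd', 'somefile.2000.dcd']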
<SYSTEM_TASK:> Wrap a view in JSON. <END_TASK> <USER_TASK:> Description: def json_response(f, *args, **kwargs): """Wrap a view in JSON. This decorator runs the given function and looks out for ajax.AJAXErrors, which it encodes into a proper HttpResponse object. If an unknown error is thrown it's encoded as a 500. All errors are then packaged up with an appropriate Content-Type and a JSON body that you can inspect in JavaScript on the client. They look like: { "message": "Error message here.", "code": 500 } Please keep in mind that raw exception messages could very well be exposed to the client if a non-AJAXError is thrown. """
try:
    result = f(*args, **kwargs)
    if isinstance(result, AJAXError):
        raise result
except AJAXError as e:
    result = e.get_response()

    request = args[0]
    logger.warning('AJAXError: %d %s - %s', e.code, request.path, e.msg,
        exc_info=True,
        extra={
            'status_code': e.code,
            'request': request
        }
    )
except Http404 as e:
    result = AJAXError(404, e.__str__()).get_response()
except Exception as e:
    import sys
    exc_info = sys.exc_info()
    # Avoid shadowing the builtin `type` with the exception class.
    exc_type, message, trace = exc_info
    if settings.DEBUG:
        import traceback
        tb = [{'file': l[0], 'line': l[1], 'in': l[2], 'code': l[3]}
              for l in traceback.extract_tb(trace)]
        result = AJAXError(500, message, traceback=tb).get_response()
    else:
        result = AJAXError(500, "Internal server error.").get_response()

    request = args[0]
    logger.error('Internal Server Error: %s' % request.path,
        exc_info=exc_info,
        extra={
            'status_code': 500,
            'request': request
        }
    )

result['Content-Type'] = 'application/json'
return result
<SYSTEM_TASK:> Load an AJAX endpoint. <END_TASK> <USER_TASK:> Description: def endpoint_loader(request, application, model, **kwargs): """Load an AJAX endpoint. This will load either an ad-hoc endpoint or it will load up a model endpoint depending on what it finds. It first attempts to load ``model`` as if it were an ad-hoc endpoint. Alternatively, it will attempt to see if there is a ``ModelEndpoint`` for the given ``model``. """
if request.method != "POST": raise AJAXError(400, _('Invalid HTTP method used.')) try: module = import_module('%s.endpoints' % application) except ImportError as e: if settings.DEBUG: raise e else: raise AJAXError(404, _('AJAX endpoint does not exist.')) if hasattr(module, model): # This is an ad-hoc endpoint endpoint = getattr(module, model) else: # This is a model endpoint method = kwargs.get('method', 'create').lower() try: del kwargs['method'] except: pass try: model_endpoint = ajax.endpoint.load(model, application, method, **kwargs) if not model_endpoint.authenticate(request, application, method): raise AJAXError(403, _('User is not authorized.')) endpoint = getattr(model_endpoint, method, False) if not endpoint: raise AJAXError(404, _('Invalid method.')) except NotRegistered: raise AJAXError(500, _('Invalid model.')) data = endpoint(request) if isinstance(data, HttpResponse): return data if isinstance(data, EnvelopedResponse): envelope = data.metadata payload = data.data else: envelope = {} payload = data envelope.update({ 'success': True, 'data': payload, }) return HttpResponse(json.dumps(envelope, cls=DjangoJSONEncoder, separators=(',', ':')))
<SYSTEM_TASK:> List objects of a model. By default will show page 1 with 20 objects on it. <END_TASK> <USER_TASK:> Description: def list(self, request): """ List objects of a model. By default will show page 1 with 20 objects on it. **Usage**:: params = {"items_per_page":10,"page":2} //all params are optional $.post("/ajax/{app}/{model}/list.json"),params) """
max_items_per_page = getattr(self, 'max_per_page',
    getattr(settings, 'AJAX_MAX_PER_PAGE', 100))
# POST values arrive as strings; coerce before comparing with ints,
# otherwise min() raises a TypeError on Python 3.
requested_items_per_page = int(request.POST.get("items_per_page", 20))
items_per_page = min(max_items_per_page, requested_items_per_page)
current_page = request.POST.get("current_page", 1)

if not self.can_list(request.user):
    raise AJAXError(403, _("Access to this endpoint is forbidden"))

objects = self.get_queryset(request)
paginator = Paginator(objects, items_per_page)
try:
    page = paginator.page(current_page)
except PageNotAnInteger:
    # If page is not an integer, deliver first page.
    page = paginator.page(1)
except EmptyPage:
    # If page is out of range (e.g. 9999), return empty list.
    page = EmptyPageResult()

data = [encoder.encode(record) for record in page.object_list]
return EnvelopedResponse(data=data, metadata={'total': paginator.count})
<SYSTEM_TASK:> Extract data from POST. <END_TASK> <USER_TASK:> Description: def _extract_data(self, request): """Extract data from POST. Handles extracting a vanilla Python dict of values that are present in the given model. This also handles instances of ``ForeignKey`` and will convert those to the appropriate object instances from the database. In other words, it will see that user is a ``ForeignKey`` to Django's ``User`` class, assume the value is an appropriate pk, and load up that record. """
data = {} for field, val in six.iteritems(request.POST): if field in self.immutable_fields: continue # Ignore immutable fields silently. if field in self.fields: field_obj = self.model._meta.get_field(field) val = self._extract_value(val) if isinstance(field_obj, models.ForeignKey): if field_obj.null and not val: clean_value = None else: clean_value = field_obj.rel.to.objects.get(pk=val) else: clean_value = field_obj.to_python(val) data[smart_str(field)] = clean_value return data
<SYSTEM_TASK:>
Fetch a given record.
<END_TASK>
<USER_TASK:>
Description:
def _get_record(self):
    """Fetch a given record.

    Handles fetching a record from the database along with throwing an
    appropriate instance of ``AJAXError``.
    """
if not self.pk:
    raise AJAXError(400, _('Invalid request for record.'))

try:
    return self.model.objects.get(pk=self.pk)
except self.model.DoesNotExist:
    raise AJAXError(404, _('%s with id of "%s" not found.') % (
        self.model.__name__, self.pk))
<SYSTEM_TASK:>
Authenticate the AJAX request.
<END_TASK>
<USER_TASK:>
Description:
def authenticate(self, request, application, method):
    """Authenticate the AJAX request.

    By default any request to fetch a model is allowed for any user,
    including anonymous users. All other methods minimally require that
    the user is already logged in.

    Most likely you will want to lock down who can edit and delete various
    models. To do this, just override this method in your child class.
    """
return self.authentication.is_authenticated(request, application, method)
<SYSTEM_TASK:>
Gets a single entry by ID.
<END_TASK>
<USER_TASK:>
Description:
def find(self, entry_id, query=None):
    """
    Gets a single entry by ID.
    """
if query is None:
    query = {}
if self.content_type_id is not None:
    query['content_type'] = self.content_type_id
normalize_select(query)
return super(EntriesProxy, self).find(entry_id, query=query)
<SYSTEM_TASK:>
Creates a webhook with given attributes.
<END_TASK>
<USER_TASK:>
Description:
def create(self, attributes=None, **kwargs):
    """
    Creates a webhook with given attributes.
    """
return super(WebhooksProxy, self).create(resource_id=None, attributes=attributes)
<SYSTEM_TASK:>
Returns a camel-cased version of a string.
<END_TASK>
<USER_TASK:>
Description:
def camel_case(snake_str):
    """
    Returns a camel-cased version of a string.

    :param snake_str: any :class:`str` object.

    Usage:

        >>> camel_case('foo_bar')
        "fooBar"
    """
components = snake_str.split('_')
# We capitalize the first letter of each component except the first one
# with the 'title' method and join them together.
return components[0] + "".join(x.title() for x in components[1:])
<SYSTEM_TASK:>
Returns the JSON representation of the environment.
<END_TASK>
<USER_TASK:>
Description:
def to_json(self):
    """
    Returns the JSON representation of the environment.
    """
result = super(Environment, self).to_json()
result.update({
    'name': self.name
})
return result
<SYSTEM_TASK:>
Provides access to content type management methods for content types of an environment.
<END_TASK>
<USER_TASK:>
Description:
def content_types(self):
    """
    Provides access to content type management methods for content types of an environment.

    API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/content-types

    :return: :class:`EnvironmentContentTypesProxy <contentful_management.space_content_types_proxy.EnvironmentContentTypesProxy>` object.
    :rtype: contentful.space_content_types_proxy.EnvironmentContentTypesProxy

    Usage:

        >>> space_content_types_proxy = environment.content_types()
        <EnvironmentContentTypesProxy space_id="cfexampleapi" environment_id="master">
    """
return EnvironmentContentTypesProxy(self._client, self.space.id, self.id)
<SYSTEM_TASK:>
Provides access to asset management methods.
<END_TASK>
<USER_TASK:>
Description:
def assets(self):
    """
    Provides access to asset management methods.

    API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/assets

    :return: :class:`EnvironmentAssetsProxy <contentful_management.environment_assets_proxy.EnvironmentAssetsProxy>` object.
    :rtype: contentful.environment_assets_proxy.EnvironmentAssetsProxy

    Usage:

        >>> environment_assets_proxy = environment.assets()
        <EnvironmentAssetsProxy space_id="cfexampleapi" environment_id="master">
    """
return EnvironmentAssetsProxy(self._client, self.space.id, self.id)
<SYSTEM_TASK:>
Provides access to locale management methods.
<END_TASK>
<USER_TASK:>
Description:
def locales(self):
    """
    Provides access to locale management methods.

    API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/locales

    :return: :class:`EnvironmentLocalesProxy <contentful_management.environment_locales_proxy.EnvironmentLocalesProxy>` object.
    :rtype: contentful.environment_locales_proxy.EnvironmentLocalesProxy

    Usage:

        >>> environment_locales_proxy = environment.locales()
        <EnvironmentLocalesProxy space_id="cfexampleapi" environment_id="master">
    """
return EnvironmentLocalesProxy(self._client, self.space.id, self.id)
<SYSTEM_TASK:>
Get fields with locales per field.
<END_TASK>
<USER_TASK:>
Description:
def fields_with_locales(self):
    """
    Get fields with locales per field.
    """
result = {}
for locale, fields in self._fields.items():
    for k, v in fields.items():
        real_field_id = self._real_field_id_for(k)
        if real_field_id not in result:
            result[real_field_id] = {}
        result[real_field_id][locale] = self._serialize_value(v)
return result
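A self-contained sketch of the inversion this method performs, with the field-id mapping and value serialization stubbed out (the real method delegates those to `_real_field_id_for` and `_serialize_value`); the function name and sample data are illustrative only.

def invert_locales(fields_by_locale):
    # Turn {locale: {field: value}} into {field: {locale: value}},
    # mirroring the loop in fields_with_locales above.
    result = {}
    for locale, fields in fields_by_locale.items():
        for field_id, value in fields.items():
            result.setdefault(field_id, {})[locale] = value
    return result

# {'en-US': {'name': 'Cat'}, 'de-DE': {'name': 'Katze'}}
# becomes {'name': {'en-US': 'Cat', 'de-DE': 'Katze'}}
print(invert_locales({'en-US': {'name': 'Cat'}, 'de-DE': {'name': 'Katze'}}))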
<SYSTEM_TASK:>
Checks if a resource has been updated since last publish.
<END_TASK>
<USER_TASK:>
Description:
def is_updated(self):
    """
    Checks if a resource has been updated since last publish.
    Returns False if resource has not been published before.
    """
if not self.is_published:
    return False
return sanitize_date(self.sys['published_at']) < sanitize_date(self.sys['updated_at'])
<SYSTEM_TASK:>
Unpublishes the resource.
<END_TASK>
<USER_TASK:>
Description:
def unpublish(self):
    """
    Unpublishes the resource.
    """
self._client._delete(
    "{0}/published".format(
        self.__class__.base_url(
            self.sys['space'].id,
            self.sys['id'],
            environment_id=self._environment_id
        ),
    ),
    headers=self._update_headers()
)
return self.reload()
<SYSTEM_TASK:>
Resolves link to a specific resource.
<END_TASK>
<USER_TASK:>
Description:
def resolve(self, space_id=None, environment_id=None):
    """
    Resolves link to a specific resource.
    """
proxy_method = getattr(
    self._client,
    base_path_for(self.link_type)
)
if self.link_type == 'Space':
    return proxy_method().find(self.id)
elif environment_id is not None:
    return proxy_method(space_id, environment_id).find(self.id)
else:
    return proxy_method(space_id).find(self.id)
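A hedged usage sketch for `resolve`; `link` and both ids are placeholders. The dispatch to the matching client proxy is chosen by `base_path_for(self.link_type)` as shown above.

# Assuming `link` is a Link whose link_type is 'Entry', this resolves it to
# the full entry via proxy_method(space_id, environment_id).find(self.id):
entry = link.resolve(space_id='cfexampleapi', environment_id='master')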
<SYSTEM_TASK:>
Calls the process endpoint for all locales of the asset.
<END_TASK>
<USER_TASK:>
Description:
def process(self):
    """
    Calls the process endpoint for all locales of the asset.

    API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/assets/asset-processing
    """
for locale in self._fields.keys():
    self._client._put(
        "{0}/files/{1}/process".format(
            self.__class__.base_url(
                self.space.id,
                self.id,
                environment_id=self._environment_id
            ),
            locale
        ),
        {},
        headers=self._update_headers()
    )
return self.reload()
<SYSTEM_TASK:>
Gets all API usages by type for a given period and API.
<END_TASK>
<USER_TASK:>
Description:
def all(self, usage_type, usage_period_id, api, query=None, *args, **kwargs):
    """
    Gets all API usages by type for a given period and API.
    """
if query is None:
    query = {}

mandatory_query = {
    'filters[usagePeriod]': usage_period_id,
    'filters[metric]': api
}
mandatory_query.update(query)

return self.client._get(
    self._url(usage_type),
    mandatory_query,
    headers={
        'x-contentful-enable-alpha-feature': 'usage-insights'
    }
)
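Grounded in the merge above, this is the parameter set that reaches the API; the period id, metric, and extra query key are placeholders.

# With usage_period_id='5', api='cma' and query={'orderBy': '-usage'},
# the merged parameters sent by the code above are:
#     {'filters[usagePeriod]': '5',
#      'filters[metric]': 'cma',
#      'orderBy': '-usage'}
# plus the header 'x-contentful-enable-alpha-feature: usage-insights'.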
<SYSTEM_TASK:>
Returns the URI for the content type.
<END_TASK>
<USER_TASK:>
Description:
def base_url(klass, space_id, resource_id=None, public=False, environment_id=None, **kwargs):
    """
    Returns the URI for the content type.
    """
if public:
    environment_slug = ""
    if environment_id is not None:
        environment_slug = "/environments/{0}".format(environment_id)
    return "spaces/{0}{1}/public/content_types".format(space_id, environment_slug)
return super(ContentType, klass).base_url(
    space_id,
    resource_id=resource_id,
    environment_id=environment_id,
    **kwargs
)
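A standalone sketch of the `public=True` branch above, showing the strings it produces; 'cfexampleapi' and 'master' are example ids.

def public_content_types_url(space_id, environment_id=None):
    # Mirrors the public branch of ContentType.base_url above.
    environment_slug = ""
    if environment_id is not None:
        environment_slug = "/environments/{0}".format(environment_id)
    return "spaces/{0}{1}/public/content_types".format(space_id, environment_slug)

assert public_content_types_url('cfexampleapi') == \
    'spaces/cfexampleapi/public/content_types'
assert public_content_types_url('cfexampleapi', 'master') == \
    'spaces/cfexampleapi/environments/master/public/content_types'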
<SYSTEM_TASK:>
Attributes for content type creation.
<END_TASK>
<USER_TASK:>
Description:
def create_attributes(klass, attributes, previous_object=None):
    """
    Attributes for content type creation.
    """
result = super(ContentType, klass).create_attributes(attributes, previous_object)
if 'fields' not in result:
    result['fields'] = []
return result
<SYSTEM_TASK:>
Returns the JSON representation of the content type.
<END_TASK>
<USER_TASK:>
Description:
def to_json(self):
    """
    Returns the JSON representation of the content type.
    """
result = super(ContentType, self).to_json()
result.update({
    'name': self.name,
    'description': self.description,
    'displayField': self.display_field,
    'fields': [f.to_json() for f in self.fields]
})
return result
<SYSTEM_TASK:>
Provides access to editor interface management methods for the given content type.
<END_TASK>
<USER_TASK:>
Description:
def editor_interfaces(self):
    """
    Provides access to editor interface management methods for the given content type.

    API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/editor-interface

    :return: :class:`ContentTypeEditorInterfacesProxy <contentful_management.content_type_editor_interfaces_proxy.ContentTypeEditorInterfacesProxy>` object.
    :rtype: contentful.content_type_editor_interfaces_proxy.ContentTypeEditorInterfacesProxy

    Usage:

        >>> content_type_editor_interfaces_proxy = content_type.editor_interfaces()
        <ContentTypeEditorInterfacesProxy space_id="cfexampleapi" environment_id="master" content_type_id="cat">
    """
return ContentTypeEditorInterfacesProxy(self._client, self.space.id, self._environment_id, self.id)
<SYSTEM_TASK:>
Provides access to snapshot management methods for the given content type.
<END_TASK>
<USER_TASK:>
Description:
def snapshots(self):
    """
    Provides access to snapshot management methods for the given content type.

    API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/snapshots/content-type-snapshots-collection

    :return: :class:`ContentTypeSnapshotsProxy <contentful_management.content_type_snapshots_proxy.ContentTypeSnapshotsProxy>` object.
    :rtype: contentful.content_type_snapshots_proxy.ContentTypeSnapshotsProxy

    Usage:

        >>> content_type_snapshots_proxy = content_type.snapshots()
        <ContentTypeSnapshotsProxy space_id="cfexampleapi" environment_id="master" content_type_id="cat">
    """
return ContentTypeSnapshotsProxy(self._client, self.space.id, self._environment_id, self.id)
<SYSTEM_TASK:>
Attributes for space creation.
<END_TASK>
<USER_TASK:>
Description:
def create_attributes(klass, attributes, previous_object=None):
    """Attributes for space creation."""
if previous_object is not None:
    return {'name': attributes.get('name', previous_object.name)}
return {
    'name': attributes.get('name', ''),
    'defaultLocale': attributes['default_locale']
}