<SYSTEM_TASK:> Internal helper to deserialize this property from a protocol buffer. <END_TASK> <USER_TASK:> Description: def _deserialize(self, entity, p, unused_depth=1): """Internal helper to deserialize this property from a protocol buffer. Subclasses may override this method. Args: entity: The entity, a Model (subclass) instance. p: A Property Message object (a protocol buffer). depth: Optional nesting depth, default 1 (unused here, but used by some subclasses that override this method). """
if p.meaning() == entity_pb.Property.EMPTY_LIST: self._store_value(entity, []) return val = self._db_get_value(p.value(), p) if val is not None: val = _BaseValue(val) # TODO: replace the remainder of the function with the following commented # out code once its feasible to make breaking changes such as not calling # _store_value(). # if self._repeated: # entity._values.setdefault(self._name, []).append(val) # else: # entity._values[self._name] = val if self._repeated: if self._has_value(entity): value = self._retrieve_value(entity) assert isinstance(value, list), repr(value) value.append(val) else: # We promote single values to lists if we are a list property value = [val] else: value = val self._store_value(entity, value)
<SYSTEM_TASK:> Internal helper to check this property for specific requirements. <END_TASK> <USER_TASK:> Description: def _check_property(self, rest=None, require_indexed=True): """Internal helper to check this property for specific requirements. Called by Model._check_properties(). Args: rest: Optional subproperty to check, of the form 'name1.name2...nameN'. Raises: InvalidPropertyError if this property does not meet the given requirements or if a subproperty is specified. (StructuredProperty overrides this method to handle subproperties.) """
if require_indexed and not self._indexed:
  raise InvalidPropertyError('Property is unindexed %s' % self._name)
if rest:
  raise InvalidPropertyError('Referencing subproperty %s.%s '
                             'but %s is not a structured property' %
                             (self._name, rest, self._name))
<SYSTEM_TASK:> Setter for key attribute. <END_TASK> <USER_TASK:> Description: def _set_value(self, entity, value): """Setter for key attribute."""
if value is not None:
  value = _validate_key(value, entity=entity)
  value = entity._validate_key(value)
entity._entity_key = value
<SYSTEM_TASK:> Internal helper method to parse keywords that may be property names. <END_TASK> <USER_TASK:> Description: def __get_arg(cls, kwds, kwd): """Internal helper method to parse keywords that may be property names."""
alt_kwd = '_' + kwd
if alt_kwd in kwds:
  return kwds.pop(alt_kwd)
if kwd in kwds:
  obj = getattr(cls, kwd, None)
  if not isinstance(obj, Property) or isinstance(obj, ModelKey):
    return kwds.pop(kwd)
return None
<SYSTEM_TASK:> Internal helper to set attributes from keyword arguments. <END_TASK> <USER_TASK:> Description: def _set_attributes(self, kwds): """Internal helper to set attributes from keyword arguments. Expando overrides this. """
cls = self.__class__
for name, value in kwds.iteritems():
  prop = getattr(cls, name)  # Raises AttributeError for unknown properties.
  if not isinstance(prop, Property):
    raise TypeError('Cannot set non-property %s' % name)
  prop._set_value(self, value)
<SYSTEM_TASK:> Internal helper to find uninitialized properties. <END_TASK> <USER_TASK:> Description: def _find_uninitialized(self): """Internal helper to find uninitialized properties. Returns: A set of property names. """
return set(name
           for name, prop in self._properties.iteritems()
           if not prop._is_initialized(self))
<SYSTEM_TASK:> Internal helper to check for uninitialized properties. <END_TASK> <USER_TASK:> Description: def _check_initialized(self): """Internal helper to check for uninitialized properties. Raises: BadValueError if it finds any. """
baddies = self._find_uninitialized()
if baddies:
  raise datastore_errors.BadValueError(
      'Entity has uninitialized properties: %s' % ', '.join(baddies))
<SYSTEM_TASK:> Get the model class for the kind. <END_TASK> <USER_TASK:> Description: def _lookup_model(cls, kind, default_model=None): """Get the model class for the kind. Args: kind: A string representing the name of the kind to lookup. default_model: The model class to use if the kind can't be found. Returns: The model class for the requested kind. Raises: KindError: The kind was not found and no default_model was provided. """
modelclass = cls._kind_map.get(kind, default_model)
if modelclass is None:
  raise KindError(
      "No model class found for kind '%s'. Did you forget to import it?"
      % kind)
return modelclass
<SYSTEM_TASK:> Compare two entities of the same class, excluding keys. <END_TASK> <USER_TASK:> Description: def _equivalent(self, other): """Compare two entities of the same class, excluding keys."""
if other.__class__ is not self.__class__:  # TODO: What about subclasses?
  raise NotImplementedError('Cannot compare different model classes. '
                            '%s is not %s' % (self.__class__.__name__,
                                              other.__class__.__name__))
if set(self._projection) != set(other._projection):
  return False
# It's all about determining inequality early.
if len(self._properties) != len(other._properties):
  return False  # Can only happen for Expandos.
my_prop_names = set(self._properties.iterkeys())
their_prop_names = set(other._properties.iterkeys())
if my_prop_names != their_prop_names:
  return False  # Again, only possible for Expandos.
if self._projection:
  my_prop_names = set(self._projection)
for name in my_prop_names:
  if '.' in name:
    name, _ = name.split('.', 1)
  my_value = self._properties[name]._get_value(self)
  their_value = other._properties[name]._get_value(other)
  if my_value != their_value:
    return False
return True
<SYSTEM_TASK:> Internal helper to turn an entity into an EntityProto protobuf. <END_TASK> <USER_TASK:> Description: def _to_pb(self, pb=None, allow_partial=False, set_key=True): """Internal helper to turn an entity into an EntityProto protobuf."""
if not allow_partial: self._check_initialized() if pb is None: pb = entity_pb.EntityProto() if set_key: # TODO: Move the key stuff into ModelAdapter.entity_to_pb()? self._key_to_pb(pb) for unused_name, prop in sorted(self._properties.iteritems()): prop._serialize(self, pb, projection=self._projection) return pb
<SYSTEM_TASK:> Internal helper to copy the key into a protobuf. <END_TASK> <USER_TASK:> Description: def _key_to_pb(self, pb): """Internal helper to copy the key into a protobuf."""
key = self._key if key is None: pairs = [(self._get_kind(), None)] ref = key_module._ReferenceFromPairs(pairs, reference=pb.mutable_key()) else: ref = key.reference() pb.mutable_key().CopyFrom(ref) group = pb.mutable_entity_group() # Must initialize this. # To work around an SDK issue, only set the entity group if the # full key is complete. TODO: Remove the top test once fixed. if key is not None and key.id(): elem = ref.path().element(0) if elem.id() or elem.name(): group.add_element().CopyFrom(elem)
<SYSTEM_TASK:> Internal helper to create an entity from an EntityProto protobuf. <END_TASK> <USER_TASK:> Description: def _from_pb(cls, pb, set_key=True, ent=None, key=None): """Internal helper to create an entity from an EntityProto protobuf."""
if not isinstance(pb, entity_pb.EntityProto): raise TypeError('pb must be a EntityProto; received %r' % pb) if ent is None: ent = cls() # A key passed in overrides a key in the pb. if key is None and pb.key().path().element_size(): key = Key(reference=pb.key()) # If set_key is not set, skip a trivial incomplete key. if key is not None and (set_key or key.id() or key.parent()): ent._key = key # NOTE(darke): Keep a map from (indexed, property name) to the property. # This allows us to skip the (relatively) expensive call to # _get_property_for for repeated fields. _property_map = {} projection = [] for indexed, plist in ((True, pb.property_list()), (False, pb.raw_property_list())): for p in plist: if p.meaning() == entity_pb.Property.INDEX_VALUE: projection.append(p.name()) property_map_key = (p.name(), indexed) if property_map_key not in _property_map: _property_map[property_map_key] = ent._get_property_for(p, indexed) _property_map[property_map_key]._deserialize(ent, p) ent._set_projection(projection) return ent
<SYSTEM_TASK:> Internal helper to get the Property for a protobuf-level property. <END_TASK> <USER_TASK:> Description: def _get_property_for(self, p, indexed=True, depth=0): """Internal helper to get the Property for a protobuf-level property."""
parts = p.name().split('.') if len(parts) <= depth: # Apparently there's an unstructured value here. # Assume it is a None written for a missing value. # (It could also be that a schema change turned an unstructured # value into a structured one. In that case, too, it seems # better to return None than to return an unstructured value, # since the latter doesn't match the current schema.) return None next = parts[depth] prop = self._properties.get(next) if prop is None: prop = self._fake_property(p, next, indexed) return prop
<SYSTEM_TASK:> Internal helper to clone self._properties if necessary. <END_TASK> <USER_TASK:> Description: def _clone_properties(self): """Internal helper to clone self._properties if necessary."""
cls = self.__class__
if self._properties is cls._properties:
  self._properties = dict(cls._properties)
<SYSTEM_TASK:> Internal helper to create a fake Property. <END_TASK> <USER_TASK:> Description: def _fake_property(self, p, next, indexed=True): """Internal helper to create a fake Property."""
self._clone_properties() if p.name() != next and not p.name().endswith('.' + next): prop = StructuredProperty(Expando, next) prop._store_value(self, _BaseValue(Expando())) else: compressed = p.meaning_uri() == _MEANING_URI_COMPRESSED prop = GenericProperty(next, repeated=p.multiple(), indexed=indexed, compressed=compressed) prop._code_name = next self._properties[prop._name] = prop return prop
<SYSTEM_TASK:> Return a dict containing the entity's property values. <END_TASK> <USER_TASK:> Description: def _to_dict(self, include=None, exclude=None): """Return a dict containing the entity's property values. Args: include: Optional set of property names to include, default all. exclude: Optional set of property names to skip, default none. A name contained in both include and exclude is excluded. """
if (include is not None and
    not isinstance(include, (list, tuple, set, frozenset))):
  raise TypeError('include should be a list, tuple or set')
if (exclude is not None and
    not isinstance(exclude, (list, tuple, set, frozenset))):
  raise TypeError('exclude should be a list, tuple or set')
values = {}
for prop in self._properties.itervalues():
  name = prop._code_name
  if include is not None and name not in include:
    continue
  if exclude is not None and name in exclude:
    continue
  try:
    values[name] = prop._get_for_dict(self)
  except UnprojectedPropertyError:
    pass  # Ignore unprojected properties rather than failing.
return values
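Usage note: a minimal sketch, assuming the App Engine ndb package this code is drawn from; the model and field names below are hypothetical.

from google.appengine.ext import ndb

class Greeting(ndb.Model):
    content = ndb.StringProperty()
    secret = ndb.StringProperty()

g = Greeting(content='hi', secret='s3kr1t')
print(g._to_dict(exclude=['secret']))   # {'content': 'hi'}
print(g._to_dict(include=['content']))  # {'content': 'hi'}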
<SYSTEM_TASK:> Internal helper to check the given properties exist and meet specified <END_TASK> <USER_TASK:> Description: def _check_properties(cls, property_names, require_indexed=True): """Internal helper to check the given properties exist and meet specified requirements. Called from query.py. Args: property_names: List or tuple of property names -- each being a string, possibly containing dots (to address subproperties of structured properties). Raises: InvalidPropertyError if one of the properties is invalid. AssertionError if the argument is not a list or tuple of strings. """
assert isinstance(property_names, (list, tuple)), repr(property_names)
for name in property_names:
  assert isinstance(name, basestring), repr(name)
  if '.' in name:
    name, rest = name.split('.', 1)
  else:
    rest = None
  prop = cls._properties.get(name)
  if prop is None:
    cls._unknown_property(name)
  else:
    prop._check_property(rest, require_indexed=require_indexed)
<SYSTEM_TASK:> Create a Query object for this class. <END_TASK> <USER_TASK:> Description: def _query(cls, *args, **kwds): """Create a Query object for this class. Args: distinct: Optional bool, shorthand for group_by = projection. *args: Used to apply an initial filter. **kwds: Passed to the Query() constructor. Returns: A Query object. """
# Validating distinct.
if 'distinct' in kwds:
  if 'group_by' in kwds:
    raise TypeError(
        'cannot use distinct= and group_by= at the same time')
  projection = kwds.get('projection')
  if not projection:
    raise TypeError(
        'cannot use distinct= without projection=')
  if kwds.pop('distinct'):
    kwds['group_by'] = projection

# TODO: Disallow non-empty args and filter=.
from .query import Query  # Import late to avoid circular imports.
qry = Query(kind=cls._get_kind(), **kwds)
qry = qry.filter(*cls._default_filters())
qry = qry.filter(*args)
return qry
<SYSTEM_TASK:> Write this entity to Cloud Datastore. <END_TASK> <USER_TASK:> Description: def _put_async(self, **ctx_options): """Write this entity to Cloud Datastore. This is the asynchronous version of Model._put(). """
if self._projection:
  raise datastore_errors.BadRequestError('Cannot put a partial entity')
from . import tasklets
ctx = tasklets.get_context()
self._prepare_for_put()
if self._key is None:
  self._key = Key(self._get_kind(), None)
self._pre_put_hook()
fut = ctx.put(self, **ctx_options)
post_hook = self._post_put_hook
if not self._is_default_hook(Model._default_post_put_hook, post_hook):
  fut.add_immediate_callback(post_hook, fut)
return fut
<SYSTEM_TASK:> Returns an instance of Model class by ID. <END_TASK> <USER_TASK:> Description: def _get_by_id(cls, id, parent=None, **ctx_options): """Returns an instance of Model class by ID. This is really just a shorthand for Key(cls, id, ...).get(). Args: id: A string or integer key ID. parent: Optional parent key of the model to get. namespace: Optional namespace. app: Optional app ID. **ctx_options: Context options. Returns: A model instance or None if not found. """
return cls._get_by_id_async(id, parent=parent, **ctx_options).get_result()
<SYSTEM_TASK:> Checks whether a specific hook is in its default state. <END_TASK> <USER_TASK:> Description: def _is_default_hook(default_hook, hook): """Checks whether a specific hook is in its default state. Args: default_hook: Callable specified by ndb internally (do not override). hook: The hook defined by a model class using _post_*_hook. Raises: TypeError if either the default hook or the tested hook are not callable. """
if not hasattr(default_hook, '__call__'):
  raise TypeError('Default hooks for ndb.model.Model must be callable')
if not hasattr(hook, '__call__'):
  raise TypeError('Hooks must be callable')
return default_hook.im_func is hook.im_func
<SYSTEM_TASK:> Decode a url-safe base64-encoded string. <END_TASK> <USER_TASK:> Description: def _DecodeUrlSafe(urlsafe): """Decode a url-safe base64-encoded string. This returns the decoded string. """
if not isinstance(urlsafe, basestring):
  raise TypeError('urlsafe must be a string; received %r' % urlsafe)
if isinstance(urlsafe, unicode):
  urlsafe = urlsafe.encode('utf8')
mod = len(urlsafe) % 4
if mod:
  urlsafe += '=' * (4 - mod)
# This is 3-4x faster than urlsafe_b64decode()
return base64.b64decode(urlsafe.replace('-', '+').replace('_', '/'))
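The padding arithmetic is the subtle step: urlsafe() strips trailing '=' characters, so the decoder must restore them before the base64 length is valid again. A standalone sketch of the same logic (the decode_urlsafe name is hypothetical):

import base64

def decode_urlsafe(urlsafe):
    # Restore the '=' padding that urlsafe() strips: base64 input
    # length must be a multiple of 4.
    mod = len(urlsafe) % 4
    if mod:
        urlsafe += '=' * (4 - mod)
    # Map the URL-safe alphabet back to the standard one, then decode.
    return base64.b64decode(urlsafe.replace('-', '+').replace('_', '/'))

print(decode_urlsafe('aGVsbG8'))  # 'hello' -- 7 chars, padded to 8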
<SYSTEM_TASK:> Return a tuple of alternating kind and id values. <END_TASK> <USER_TASK:> Description: def flat(self): """Return a tuple of alternating kind and id values."""
flat = []
for kind, id in self.__pairs:
  flat.append(kind)
  flat.append(id)
return tuple(flat)
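A small illustration of the interleaving, using a hand-built pairs tuple in place of the private __pairs attribute:

# For a key like Key('Parent', 1, 'Child', 'a'), the pairs are
# (('Parent', 1), ('Child', 'a')); the loop interleaves kinds and ids.
pairs = (('Parent', 1), ('Child', 'a'))
flat = []
for kind, id_ in pairs:
    flat.extend((kind, id_))
print(tuple(flat))   # ('Parent', 1, 'Child', 'a')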
<SYSTEM_TASK:> Return the Reference object for this Key. <END_TASK> <USER_TASK:> Description: def reference(self): """Return the Reference object for this Key. This is a entity_pb.Reference instance -- a protocol buffer class used by the lower-level API to the datastore. NOTE: The caller should not mutate the return value. """
if self.__reference is None:
  self.__reference = _ConstructReference(self.__class__,
                                         pairs=self.__pairs,
                                         app=self.__app,
                                         namespace=self.__namespace)
return self.__reference
<SYSTEM_TASK:> Return a url-safe string encoding this Key's Reference. <END_TASK> <USER_TASK:> Description: def urlsafe(self): """Return a url-safe string encoding this Key's Reference. This string is compatible with other APIs and languages and with the strings used to represent Keys in GQL and in the App Engine Admin Console. """
# This is 3-4x faster than urlsafe_b64encode()
urlsafe = base64.b64encode(self.reference().Encode())
return urlsafe.rstrip('=').replace('+', '-').replace('/', '_')
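A quick round-trip check of the strip/translate scheme, using an arbitrary byte string as a stand-in for the serialized Reference:

import base64

raw = b'\x0bsome-serialized-reference'   # stand-in for reference().Encode()
us = base64.b64encode(raw).rstrip(b'=').replace(b'+', b'-').replace(b'/', b'_')
pad = b'=' * (-len(us) % 4)              # same arithmetic as _DecodeUrlSafe
back = base64.b64decode((us + pad).replace(b'-', b'+').replace(b'_', b'/'))
assert back == raw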
<SYSTEM_TASK:> Return a Future whose result is the entity for this Key. <END_TASK> <USER_TASK:> Description: def get_async(self, **ctx_options): """Return a Future whose result is the entity for this Key. If no such entity exists, a Future is still returned, and the Future's eventual result will be None. """
from . import model, tasklets ctx = tasklets.get_context() cls = model.Model._kind_map.get(self.kind()) if cls: cls._pre_get_hook(self) fut = ctx.get(self, **ctx_options) if cls: post_hook = cls._post_get_hook if not cls._is_default_hook(model.Model._default_post_get_hook, post_hook): fut.add_immediate_callback(post_hook, self, fut) return fut
<SYSTEM_TASK:> Schedule deletion of the entity for this Key. <END_TASK> <USER_TASK:> Description: def delete_async(self, **ctx_options): """Schedule deletion of the entity for this Key. This returns a Future, whose result becomes available once the deletion is complete. If no such entity exists, a Future is still returned. In all cases the Future's result is None (i.e. there is no way to tell whether the entity existed or not). """
from . import tasklets, model ctx = tasklets.get_context() cls = model.Model._kind_map.get(self.kind()) if cls: cls._pre_delete_hook(self) fut = ctx.delete(self, **ctx_options) if cls: post_hook = cls._post_delete_hook if not cls._is_default_hook(model.Model._default_post_delete_hook, post_hook): fut.add_immediate_callback(post_hook, self, fut) return fut
<SYSTEM_TASK:> Add an exception that should not be logged. <END_TASK> <USER_TASK:> Description: def add_flow_exception(exc): """Add an exception that should not be logged. The argument must be a subclass of Exception. """
global _flow_exceptions
if not isinstance(exc, type) or not issubclass(exc, Exception):
  raise TypeError('Expected an Exception subclass, got %r' % (exc,))
as_set = set(_flow_exceptions)
as_set.add(exc)
_flow_exceptions = tuple(as_set)
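A hedged usage sketch; the Redirect class is hypothetical and stands in for any control-flow exception an application raises through tasklets:

class Redirect(Exception):
    """Hypothetical exception used for control flow, not errors."""

# Raising Redirect inside a tasklet will no longer be logged as an error.
add_flow_exception(Redirect)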
<SYSTEM_TASK:> Internal helper to initialize _flow_exceptions. <END_TASK> <USER_TASK:> Description: def _init_flow_exceptions(): """Internal helper to initialize _flow_exceptions. This automatically adds webob.exc.HTTPException, if it can be imported. """
global _flow_exceptions _flow_exceptions = () add_flow_exception(datastore_errors.Rollback) try: from webob import exc except ImportError: pass else: add_flow_exception(exc.HTTPException)
<SYSTEM_TASK:> Public function to sleep some time. <END_TASK> <USER_TASK:> Description: def sleep(dt): """Public function to sleep some time. Example: yield tasklets.sleep(0.5) # Sleep for half a sec. """
fut = Future('sleep(%.3f)' % dt)
eventloop.queue_call(dt, fut.set_result, None)
return fut
<SYSTEM_TASK:> Helper to transfer result or errors from one Future to another. <END_TASK> <USER_TASK:> Description: def _transfer_result(fut1, fut2): """Helper to transfer result or errors from one Future to another."""
exc = fut1.get_exception()
if exc is not None:
  tb = fut1.get_traceback()
  fut2.set_exception(exc, tb)
else:
  val = fut1.get_result()
  fut2.set_result(val)
<SYSTEM_TASK:> Decorator to run a function as a tasklet when called. <END_TASK> <USER_TASK:> Description: def synctasklet(func): """Decorator to run a function as a tasklet when called. Use this to wrap a request handler function that will be called by some web application framework (e.g. a Django view function or a webapp.RequestHandler.get method). """
taskletfunc = tasklet(func) # wrap at declaration time. @utils.wrapping(func) def synctasklet_wrapper(*args, **kwds): # pylint: disable=invalid-name __ndb_debug__ = utils.func_info(func) return taskletfunc(*args, **kwds).get_result() return synctasklet_wrapper
<SYSTEM_TASK:> A sync tasklet that sets a fresh default Context. <END_TASK> <USER_TASK:> Description: def toplevel(func): """A sync tasklet that sets a fresh default Context. Use this for toplevel view functions such as webapp.RequestHandler.get() or Django view functions. """
synctaskletfunc = synctasklet(func) # wrap at declaration time. @utils.wrapping(func) def add_context_wrapper(*args, **kwds): # pylint: disable=invalid-name __ndb_debug__ = utils.func_info(func) _state.clear_all_pending() # Create and install a new context. ctx = make_default_context() try: set_context(ctx) return synctaskletfunc(*args, **kwds) finally: set_context(None) ctx.flush().check_success() eventloop.run() # Ensure writes are flushed, etc. return add_context_wrapper
<SYSTEM_TASK:> Creates a new context to connect to a remote Cloud Datastore instance. <END_TASK> <USER_TASK:> Description: def _make_cloud_datastore_context(app_id, external_app_ids=()): """Creates a new context to connect to a remote Cloud Datastore instance. This should only be used outside of Google App Engine. Args: app_id: The application id to connect to. This differs from the project id as it may have an additional prefix, e.g. "s~" or "e~". external_app_ids: A list of apps that may be referenced by data in your application. For example, if you are connected to s~my-app and store keys for s~my-other-app, you should include s~my-other-app in the external_apps list. Returns: An ndb.Context that can connect to a Remote Cloud Datastore. You can use this context by passing it to ndb.set_context. """
from . import model # Late import to deal with circular imports. # Late import since it might not exist. if not datastore_pbs._CLOUD_DATASTORE_ENABLED: raise datastore_errors.BadArgumentError( datastore_pbs.MISSING_CLOUD_DATASTORE_MESSAGE) import googledatastore try: from google.appengine.datastore import cloud_datastore_v1_remote_stub except ImportError: from google3.apphosting.datastore import cloud_datastore_v1_remote_stub current_app_id = os.environ.get('APPLICATION_ID', None) if current_app_id and current_app_id != app_id: # TODO(pcostello): We should support this so users can connect to different # applications. raise ValueError('Cannot create a Cloud Datastore context that connects ' 'to an application (%s) that differs from the application ' 'already connected to (%s).' % (app_id, current_app_id)) os.environ['APPLICATION_ID'] = app_id id_resolver = datastore_pbs.IdResolver((app_id,) + tuple(external_app_ids)) project_id = id_resolver.resolve_project_id(app_id) endpoint = googledatastore.helper.get_project_endpoint_from_env(project_id) datastore = googledatastore.Datastore( project_endpoint=endpoint, credentials=googledatastore.helper.get_credentials_from_env()) conn = model.make_connection(_api_version=datastore_rpc._CLOUD_DATASTORE_V1, _id_resolver=id_resolver) # If necessary, install the stubs try: stub = cloud_datastore_v1_remote_stub.CloudDatastoreV1RemoteStub(datastore) apiproxy_stub_map.apiproxy.RegisterStub(datastore_rpc._CLOUD_DATASTORE_V1, stub) except: pass # The stub is already installed. # TODO(pcostello): Ensure the current stub is connected to the right project. # Install a memcache and taskqueue stub which throws on everything. try: apiproxy_stub_map.apiproxy.RegisterStub('memcache', _ThrowingStub()) except: pass # The stub is already installed. try: apiproxy_stub_map.apiproxy.RegisterStub('taskqueue', _ThrowingStub()) except: pass # The stub is already installed. return make_context(conn=conn)
<SYSTEM_TASK:> Internal helper to check a list of indexed fields. <END_TASK> <USER_TASK:> Description: def _analyze_indexed_fields(indexed_fields): """Internal helper to check a list of indexed fields. Args: indexed_fields: A list of names, possibly dotted names. (A dotted name is a string containing names separated by dots, e.g. 'foo.bar.baz'. An undotted name is a string containing no dots, e.g. 'foo'.) Returns: A dict whose keys are undotted names. For each undotted name in the argument, the dict contains that undotted name as a key with None as a value. For each dotted name in the argument, the dict contains the first component as a key with a list of remainders as values. Example: If the argument is ['foo.bar.baz', 'bar', 'foo.bletch'], the return value is {'foo': ['bar.baz', 'bletch'], 'bar': None}. Raises: TypeError if an argument is not a string. ValueError for duplicate arguments and for conflicting arguments (when an undotted name also appears as the first component of a dotted name). """
result = {}
for field_name in indexed_fields:
  if not isinstance(field_name, basestring):
    raise TypeError('Field names must be strings; got %r' % (field_name,))
  if '.' not in field_name:
    if field_name in result:
      raise ValueError('Duplicate field name %s' % field_name)
    result[field_name] = None
  else:
    head, tail = field_name.split('.', 1)
    if head not in result:
      result[head] = [tail]
    elif result[head] is None:
      raise ValueError('Field name %s conflicts with ancestor %s' %
                       (field_name, head))
    else:
      result[head].append(tail)
return result
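A condensed replica of the bucketing logic (validation omitted), run on the docstring's own example:

def analyze(indexed_fields):
    # Undotted names map to None; dotted names group remainders by head.
    result = {}
    for field_name in indexed_fields:
        if '.' not in field_name:
            result[field_name] = None
        else:
            head, tail = field_name.split('.', 1)
            result.setdefault(head, []).append(tail)
    return result

print(analyze(['foo.bar.baz', 'bar', 'foo.bletch']))
# -> {'foo': ['bar.baz', 'bletch'], 'bar': None}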
<SYSTEM_TASK:> Construct a Model subclass corresponding to a Message subclass. <END_TASK> <USER_TASK:> Description: def _make_model_class(message_type, indexed_fields, **props): """Construct a Model subclass corresponding to a Message subclass. Args: message_type: A Message subclass. indexed_fields: A list of dotted and undotted field names. **props: Additional properties with which to seed the class. Returns: A Model subclass whose properties correspond to those fields of message_type whose field name is listed in indexed_fields, plus the properties specified by the **props arguments. For dotted field names, a StructuredProperty is generated using a Model subclass created by a recursive call. Raises: Whatever _analyze_indexed_fields() raises. ValueError if a field name conflicts with a name in **props. ValueError if a field name is not valid field of message_type. ValueError if an undotted field name designates a MessageField. """
analyzed = _analyze_indexed_fields(indexed_fields) for field_name, sub_fields in analyzed.iteritems(): if field_name in props: raise ValueError('field name %s is reserved' % field_name) try: field = message_type.field_by_name(field_name) except KeyError: raise ValueError('Message type %s has no field named %s' % (message_type.__name__, field_name)) if isinstance(field, messages.MessageField): if not sub_fields: raise ValueError( 'MessageField %s cannot be indexed, only sub-fields' % field_name) sub_model_class = _make_model_class(field.type, sub_fields) prop = model.StructuredProperty(sub_model_class, field_name, repeated=field.repeated) else: if sub_fields is not None: raise ValueError( 'Unstructured field %s cannot have indexed sub-fields' % field_name) if isinstance(field, messages.EnumField): prop = EnumProperty(field.type, field_name, repeated=field.repeated) elif isinstance(field, messages.BytesField): prop = model.BlobProperty(field_name, repeated=field.repeated, indexed=True) else: # IntegerField, FloatField, BooleanField, StringField. prop = model.GenericProperty(field_name, repeated=field.repeated) props[field_name] = prop return model.MetaModel('_%s__Model' % message_type.__name__, (model.Model,), props)
<SYSTEM_TASK:> Compute and store a default value if necessary. <END_TASK> <USER_TASK:> Description: def _get_value(self, entity): """Compute and store a default value if necessary."""
value = super(_ClassKeyProperty, self)._get_value(entity)
if not value:
  value = entity._class_key()
  self._store_value(entity, value)
return value
<SYSTEM_TASK:> Internal helper to return the list of polymorphic base classes. <END_TASK> <USER_TASK:> Description: def _get_hierarchy(cls): """Internal helper to return the list of polymorphic base classes. This returns a list of class objects, e.g. [Animal, Feline, Cat]. """
bases = []
for base in cls.mro():  # pragma: no branch
  if hasattr(base, '_get_hierarchy'):
    bases.append(base)
del bases[-1]  # Delete PolyModel itself
bases.reverse()
return bases
<SYSTEM_TASK:> Converts a list of paths to environments to env_data. <END_TASK> <USER_TASK:> Description: def convert_to_env_data(mgr, env_paths, validator_func, activate_func, name_template, display_name_template, name_prefix): """Converts a list of paths to environments to env_data. env_data is a structure {name -> (resourcedir, kernel spec)} """
env_data = {} for venv_dir in env_paths: venv_name = os.path.split(os.path.abspath(venv_dir))[1] kernel_name = name_template.format(name_prefix + venv_name) kernel_name = kernel_name.lower() if kernel_name in env_data: mgr.log.debug( "Found duplicate env kernel: %s, which would again point to %s. Using the first!", kernel_name, venv_dir) continue argv, language, resource_dir = validator_func(venv_dir) if not argv: # probably does not contain the kernel type (e.g. not R or python or does not contain # the kernel code itself) continue display_name = display_name_template.format(kernel_name) kspec_dict = {"argv": argv, "language": language, "display_name": display_name, "resource_dir": resource_dir } # the default vars are needed to save the vars in the function context def loader(env_dir=venv_dir, activate_func=activate_func, mgr=mgr): mgr.log.debug("Loading env data for %s" % env_dir) res = activate_func(mgr, env_dir) # mgr.log.info("PATH: %s" % res['PATH']) return res kspec = EnvironmentLoadingKernelSpec(loader, **kspec_dict) env_data.update({kernel_name: (resource_dir, kspec)}) return env_data
<SYSTEM_TASK:> Validates that this env contains an IPython kernel and returns info to start it <END_TASK> <USER_TASK:> Description: def validate_IPykernel(venv_dir): """Validates that this env contains an IPython kernel and returns info to start it Returns: tuple (ARGV, language, resource_dir) """
python_exe_name = find_exe(venv_dir, "python")
if python_exe_name is None:
    python_exe_name = find_exe(venv_dir, "python2")
if python_exe_name is None:
    python_exe_name = find_exe(venv_dir, "python3")
if python_exe_name is None:
    return [], None, None

# Make some checks for ipython first, because calling the import is expensive
if find_exe(venv_dir, "ipython") is None:
    if find_exe(venv_dir, "ipython2") is None:
        if find_exe(venv_dir, "ipython3") is None:
            return [], None, None

# check if this is really an ipython **kernel**
import subprocess
try:
    # Note: the import must be passed unquoted, otherwise Python just
    # evaluates a string literal and the check always succeeds.
    subprocess.check_call([python_exe_name, '-c', 'import ipykernel'])
except:
    # not installed? -> not usable in any case...
    return [], None, None

argv = [python_exe_name, "-m", "ipykernel", "-f", "{connection_file}"]
resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logos", "python")
return argv, "python", resources_dir
<SYSTEM_TASK:> Validates that this env contains an IRkernel kernel and returns info to start it <END_TASK> <USER_TASK:> Description: def validate_IRkernel(venv_dir): """Validates that this env contains an IRkernel kernel and returns info to start it Returns: tuple (ARGV, language, resource_dir) """
r_exe_name = find_exe(venv_dir, "R")
if r_exe_name is None:
    return [], None, None

# check if this is really an IRkernel **kernel**
import subprocess
resources_dir = None
try:
    print_resources = 'cat(as.character(system.file("kernelspec", package = "IRkernel")))'
    resources_dir_bytes = subprocess.check_output([r_exe_name, '--slave', '-e', print_resources])
    resources_dir = resources_dir_bytes.decode(errors='ignore')
except:
    # not installed? -> not usable in any case...
    return [], None, None

argv = [r_exe_name, "--slave", "-e", "IRkernel::main()", "--args", "{connection_file}"]
if not os.path.exists(resources_dir.strip()):
    # Fallback to our own logos, but don't get the nice js goodies...
    resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logos", "r")
return argv, "r", resources_dir
<SYSTEM_TASK:> Finds an exe with that name in the environment path <END_TASK> <USER_TASK:> Description: def find_exe(env_dir, name): """Finds an exe with that name in the environment path"""
if platform.system() == "Windows":
    name = name + ".exe"

# find the binary
exe_name = os.path.join(env_dir, name)
if not os.path.exists(exe_name):
    exe_name = os.path.join(env_dir, "bin", name)
if not os.path.exists(exe_name):
    exe_name = os.path.join(env_dir, "Scripts", name)
if not os.path.exists(exe_name):
    return None
return exe_name
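Hedged usage sketch; the checked locations cover the env root, POSIX bin/ and Windows Scripts/, and the path below is hypothetical:

exe = find_exe("/opt/venvs/py3", "python")   # hypothetical virtualenv path
if exe is not None:
    print("interpreter at", exe)
else:
    print("no python in this env")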
<SYSTEM_TASK:> Finds kernel specs from virtualenv environments <END_TASK> <USER_TASK:> Description: def get_virtualenv_env_data(mgr): """Finds kernel specs from virtualenv environments env_data is a structure {name -> (resourcedir, kernel spec)} """
if not mgr.find_virtualenv_envs: return {} mgr.log.debug("Looking for virtualenv environments in %s...", mgr.virtualenv_env_dirs) # find all potential env paths env_paths = find_env_paths_in_basedirs(mgr.virtualenv_env_dirs) mgr.log.debug("Scanning virtualenv environments for python kernels...") env_data = convert_to_env_data(mgr=mgr, env_paths=env_paths, validator_func=validate_IPykernel, activate_func=_get_env_vars_for_virtualenv_env, name_template=mgr.virtualenv_prefix_template, display_name_template=mgr.display_name_template, # virtualenv has only python, so no need for a prefix name_prefix="") return env_data
<SYSTEM_TASK:> Simply bash-specific wrapper around source-foreign <END_TASK> <USER_TASK:> Description: def source_bash(args, stdin=None): """Simply bash-specific wrapper around source-foreign Returns a dict to be used as a new environment"""
args = list(args)
new_args = ['bash', '--sourcer=source']
new_args.extend(args)
return source_foreign(new_args, stdin=stdin)
<SYSTEM_TASK:> Simply zsh-specific wrapper around source-foreign <END_TASK> <USER_TASK:> Description: def source_zsh(args, stdin=None): """Simply zsh-specific wrapper around source-foreign Returns a dict to be used as a new environment"""
args = list(args)
new_args = ['zsh', '--sourcer=source']
new_args.extend(args)
return source_foreign(new_args, stdin=stdin)
<SYSTEM_TASK:> Simple cmd.exe-specific wrapper around source-foreign. <END_TASK> <USER_TASK:> Description: def source_cmd(args, stdin=None): """Simple cmd.exe-specific wrapper around source-foreign. returns a dict to be used as a new environment """
args = list(args) fpath = locate_binary(args[0]) args[0] = fpath if fpath else args[0] if not os.path.isfile(args[0]): raise RuntimeError("Command not found: %s" % args[0]) prevcmd = 'call ' prevcmd += ' '.join([argvquote(arg, force=True) for arg in args]) prevcmd = escape_windows_cmd_string(prevcmd) args.append('--prevcmd={}'.format(prevcmd)) args.insert(0, 'cmd') args.append('--interactive=0') args.append('--sourcer=call') args.append('--envcmd=set') args.append('--seterrpostcmd=if errorlevel 1 exit 1') args.append('--use-tmpfile=1') return source_foreign(args, stdin=stdin)
<SYSTEM_TASK:> Sources a file written in a foreign shell language. <END_TASK> <USER_TASK:> Description: def source_foreign(args, stdin=None): """Sources a file written in a foreign shell language."""
parser = _ensure_source_foreign_parser() ns = parser.parse_args(args) if ns.prevcmd is not None: pass # don't change prevcmd if given explicitly elif os.path.isfile(ns.files_or_code[0]): # we have filename to source ns.prevcmd = '{} "{}"'.format(ns.sourcer, '" "'.join(ns.files_or_code)) elif ns.prevcmd is None: ns.prevcmd = ' '.join(ns.files_or_code) # code to run, no files fsenv = foreign_shell_data(shell=ns.shell, login=ns.login, interactive=ns.interactive, envcmd=ns.envcmd, aliascmd=ns.aliascmd, extra_args=ns.extra_args, safe=ns.safe, prevcmd=ns.prevcmd, postcmd=ns.postcmd, funcscmd=ns.funcscmd, sourcer=ns.sourcer, use_tmpfile=ns.use_tmpfile, seterrprevcmd=ns.seterrprevcmd, seterrpostcmd=ns.seterrpostcmd) if fsenv is None: raise RuntimeError("Source failed: {}\n".format(ns.prevcmd), 1) # apply results env = os.environ.copy() for k, v in fsenv.items(): if k in env and v == env[k]: continue # no change from original env[k] = v # Remove any env-vars that were unset by the script. for k in os.environ: # use os.environ again to prevent errors about changed size if k not in fsenv: env.pop(k, None) return env
<SYSTEM_TASK:> Converts to a boolean in a semantically meaningful way. <END_TASK> <USER_TASK:> Description: def to_bool(x): """Converts to a boolean in a semantically meaningful way."""
if isinstance(x, bool):
    return x
elif isinstance(x, str):
    return x.lower() not in _FALSES
else:
    return bool(x)
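A quick truth table. _FALSES is defined elsewhere in the module, so the set below is an assumed stand-in, and to_bool_demo replicates the function for a self-contained run:

_FALSES = {'', '0', 'n', 'f', 'no', 'none', 'false'}  # assumed contents

def to_bool_demo(x):
    if isinstance(x, bool):
        return x
    elif isinstance(x, str):
        return x.lower() not in _FALSES
    else:
        return bool(x)

for x in (True, 'False', 'yes', 0, 1.5, []):
    print(repr(x), '->', to_bool_demo(x))
# True -> True, 'False' -> False, 'yes' -> True, 0 -> False, 1.5 -> True, [] -> False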
<SYSTEM_TASK:> Parses the environment portion of string into a dict. <END_TASK> <USER_TASK:> Description: def parse_env(s): """Parses the environment portion of string into a dict."""
m = ENV_RE.search(s)
if m is None:
    return {}
g1 = m.group(1)
env = dict(ENV_SPLIT_RE.findall(g1))
return env
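ENV_RE and ENV_SPLIT_RE are module-level patterns not shown in this excerpt; the stand-ins below are assumptions chosen only to make the shape of the parse concrete:

import re

# Plausible stand-ins (assumptions, not the originals): a block like
# "env: {KEY=value,KEY=value}".
ENV_RE = re.compile(r'env:\s*\{([^}]*)\}')
ENV_SPLIT_RE = re.compile(r'(\w+)=([^,]*)')

m = ENV_RE.search('env: {PATH=/usr/bin,LANG=C}')
print(dict(ENV_SPLIT_RE.findall(m.group(1))))
# {'PATH': '/usr/bin', 'LANG': 'C'}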
<SYSTEM_TASK:> Finds kernel specs from conda environments <END_TASK> <USER_TASK:> Description: def get_conda_env_data(mgr): """Finds kernel specs from conda environments env_data is a structure {name -> (resourcedir, kernel spec)} """
if not mgr.find_conda_envs: return {} mgr.log.debug("Looking for conda environments in %s...", mgr.conda_env_dirs) # find all potential env paths env_paths = find_env_paths_in_basedirs(mgr.conda_env_dirs) env_paths.extend(_find_conda_env_paths_from_conda(mgr)) env_paths = list(set(env_paths)) # remove duplicates mgr.log.debug("Scanning conda environments for python kernels...") env_data = convert_to_env_data(mgr=mgr, env_paths=env_paths, validator_func=validate_IPykernel, activate_func=_get_env_vars_for_conda_env, name_template=mgr.conda_prefix_template, display_name_template=mgr.display_name_template, name_prefix="") # lets keep the py kernels without a prefix... if mgr.find_r_envs: mgr.log.debug("Scanning conda environments for R kernels...") env_data.update(convert_to_env_data(mgr=mgr, env_paths=env_paths, validator_func=validate_IRkernel, activate_func=_get_env_vars_for_conda_env, name_template=mgr.conda_prefix_template, display_name_template=mgr.display_name_template, name_prefix="r_")) return env_data
<SYSTEM_TASK:> Check the name of the environment against the blacklist and the <END_TASK> <USER_TASK:> Description: def validate_env(self, envname): """ Check the name of the environment against the blacklist and the whitelist. If a whitelist is specified, only it is checked. """
if self.whitelist_envs and envname in self.whitelist_envs:
    return True
elif self.whitelist_envs:
    return False

if self.blacklist_envs and envname not in self.blacklist_envs:
    return True
elif self.blacklist_envs:
    # If there is just a True, all envs are blacklisted
    return False
else:
    return True
<SYSTEM_TASK:> Get the data about the available environments. <END_TASK> <USER_TASK:> Description: def _get_env_data(self, reload=False): """Get the data about the available environments. env_data is a structure {name -> (resourcedir, kernel spec)} """
# This is called much too often and finding-process is really expensive :-( if not reload and getattr(self, "_env_data_cache", {}): return getattr(self, "_env_data_cache") env_data = {} for supplyer in ENV_SUPPLYER: env_data.update(supplyer(self)) env_data = {name: env_data[name] for name in env_data if self.validate_env(name)} new_kernels = [env for env in list(env_data.keys()) if env not in list(self._env_data_cache.keys())] if new_kernels: self.log.info("Found new kernels in environments: %s", ", ".join(new_kernels)) self._env_data_cache = env_data return env_data
<SYSTEM_TASK:> Returns the dict of name -> kernel_spec for all environments <END_TASK> <USER_TASK:> Description: def get_all_kernel_specs_for_envs(self): """Returns the dict of name -> kernel_spec for all environments"""
data = self._get_env_data()
return {name: data[name][1] for name in data}
<SYSTEM_TASK:> Returns a dict mapping kernel names and resource directories. <END_TASK> <USER_TASK:> Description: def get_all_specs(self): """Returns a dict mapping kernel names and resource directories. """
# This is new in 4.1 -> https://github.com/jupyter/jupyter_client/pull/93
specs = self.get_all_kernel_specs_for_envs()
specs.update(super(EnvironmentKernelSpecManager, self).get_all_specs())
return specs
<SYSTEM_TASK:> Parses the incoming bytestream as XML and returns the resulting data. <END_TASK> <USER_TASK:> Description: def parse(self, stream, media_type=None, parser_context=None): """ Parses the incoming bytestream as XML and returns the resulting data. """
assert etree, 'XMLParser requires defusedxml to be installed' parser_context = parser_context or {} encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) parser = etree.DefusedXMLParser(encoding=encoding) try: tree = etree.parse(stream, parser=parser, forbid_dtd=True) except (etree.ParseError, ValueError) as exc: raise ParseError('XML parse error - %s' % six.text_type(exc)) data = self._xml_convert(tree.getroot()) return data
<SYSTEM_TASK:> convert the xml `element` into the corresponding python object <END_TASK> <USER_TASK:> Description: def _xml_convert(self, element): """ convert the xml `element` into the corresponding python object """
children = list(element)
if len(children) == 0:
    return self._type_convert(element.text)
else:
    # if the first child tag is list-item it means all children are list-items
    if children[0].tag == "list-item":
        data = []
        for child in children:
            data.append(self._xml_convert(child))
    else:
        data = {}
        for child in children:
            data[child.tag] = self._xml_convert(child)
    return data
<SYSTEM_TASK:> Converts the value returned by the XML parser into the equivalent <END_TASK> <USER_TASK:> Description: def _type_convert(self, value): """ Converts the value returned by the XML parser into the equivalent Python type """
if value is None:
    return value
try:
    return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
except ValueError:
    pass
try:
    return int(value)
except ValueError:
    pass
try:
    return decimal.Decimal(value)
except decimal.InvalidOperation:
    pass
return value
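A standalone replica showing the fall-through order (datetime, then int, then Decimal); a value like '42' never reaches the Decimal branch:

import datetime
import decimal

def type_convert(value):
    # Same fall-through as _type_convert above.
    if value is None:
        return value
    try:
        return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
    except ValueError:
        pass
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return decimal.Decimal(value)
    except decimal.InvalidOperation:
        pass
    return value

print(type_convert('42'), type_convert('3.14'), type_convert('hello'))
# 42 3.14 hello  (int, Decimal, unchanged str)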
<SYSTEM_TASK:> Renders `data` into serialized XML. <END_TASK> <USER_TASK:> Description: def render(self, data, accepted_media_type=None, renderer_context=None): """ Renders `data` into serialized XML. """
if data is None: return '' stream = StringIO() xml = SimplerXMLGenerator(stream, self.charset) xml.startDocument() xml.startElement(self.root_tag_name, {}) self._to_xml(xml, data) xml.endElement(self.root_tag_name) xml.endDocument() return stream.getvalue()
<SYSTEM_TASK:> Open a connection to the device. <END_TASK> <USER_TASK:> Description: def open(self): """Open a connection to the device."""
device_type = 'cisco_ios' if self.transport == 'telnet': device_type = 'cisco_ios_telnet' self.device = ConnectHandler(device_type=device_type, host=self.hostname, username=self.username, password=self.password, **self.netmiko_optional_args) # ensure in enable mode self.device.enable()
<SYSTEM_TASK:> Write a temp file for use with inline config and SCP. <END_TASK> <USER_TASK:> Description: def _create_tmp_file(config): """Write a temp file for use with inline config and SCP."""
tmp_dir = tempfile.gettempdir() rand_fname = py23_compat.text_type(uuid.uuid4()) filename = os.path.join(tmp_dir, rand_fname) with open(filename, 'wt') as fobj: fobj.write(config) return filename
<SYSTEM_TASK:> Transfer file to remote device for either merge or replace operations <END_TASK> <USER_TASK:> Description: def _load_candidate_wrapper(self, source_file=None, source_config=None, dest_file=None, file_system=None): """ Transfer file to remote device for either merge or replace operations Returns (return_status, msg) """
return_status = False msg = '' if source_file and source_config: raise ValueError("Cannot simultaneously set source_file and source_config") if source_config: if self.inline_transfer: (return_status, msg) = self._inline_tcl_xfer(source_config=source_config, dest_file=dest_file, file_system=file_system) else: # Use SCP tmp_file = self._create_tmp_file(source_config) (return_status, msg) = self._scp_file(source_file=tmp_file, dest_file=dest_file, file_system=file_system) if tmp_file and os.path.isfile(tmp_file): os.remove(tmp_file) if source_file: if self.inline_transfer: (return_status, msg) = self._inline_tcl_xfer(source_file=source_file, dest_file=dest_file, file_system=file_system) else: (return_status, msg) = self._scp_file(source_file=source_file, dest_file=dest_file, file_system=file_system) if not return_status: if msg == '': msg = "Transfer to remote device failed" return (return_status, msg)
<SYSTEM_TASK:> SCP file to device filesystem, defaults to candidate_config. <END_TASK> <USER_TASK:> Description: def load_replace_candidate(self, filename=None, config=None): """ SCP file to device filesystem, defaults to candidate_config. Return None or raise exception """
self.config_replace = True return_status, msg = self._load_candidate_wrapper(source_file=filename, source_config=config, dest_file=self.candidate_cfg, file_system=self.dest_file_system) if not return_status: raise ReplaceConfigException(msg)
<SYSTEM_TASK:> Special handler for hostname change on commit operation. <END_TASK> <USER_TASK:> Description: def _commit_hostname_handler(self, cmd): """Special handler for hostname change on commit operation."""
current_prompt = self.device.find_prompt().strip() terminating_char = current_prompt[-1] pattern = r"[>#{}]\s*$".format(terminating_char) # Look exclusively for trailing pattern that includes '#' and '>' output = self.device.send_command_expect(cmd, expect_string=pattern) # Reset base prompt in case hostname changed self.device.set_base_prompt() return output
<SYSTEM_TASK:> Generate full file path on remote device. <END_TASK> <USER_TASK:> Description: def _gen_full_path(self, filename, file_system=None): """Generate full file path on remote device."""
if file_system is None:
    return '{}/{}'.format(self.dest_file_system, filename)
else:
    if ":" not in file_system:
        raise ValueError("Invalid file_system specified: {}".format(file_system))
    return '{}/{}'.format(file_system, filename)
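A standalone replica of the two branches, with dest_file_system defaulted to an assumed 'flash:':

def gen_full_path(filename, dest_file_system='flash:', file_system=None):
    # Illustrative copy of the method's logic.
    if file_system is None:
        return '{}/{}'.format(dest_file_system, filename)
    if ":" not in file_system:
        raise ValueError("Invalid file_system specified: {}".format(file_system))
    return '{}/{}'.format(file_system, filename)

print(gen_full_path('candidate_config.txt'))
# flash:/candidate_config.txt
print(gen_full_path('rollback_config.txt', file_system='bootflash:'))
# bootflash:/rollback_config.txt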
<SYSTEM_TASK:> Save a configuration that can be used for rollback. <END_TASK> <USER_TASK:> Description: def _gen_rollback_cfg(self): """Save a configuration that can be used for rollback."""
cfg_file = self._gen_full_path(self.rollback_cfg) cmd = 'copy running-config {}'.format(cfg_file) self._disable_confirm() self.device.send_command_expect(cmd) self._enable_confirm()
<SYSTEM_TASK:> Check that the file exists on remote device using full path. <END_TASK> <USER_TASK:> Description: def _check_file_exists(self, cfg_file): """ Check that the file exists on remote device using full path. cfg_file is full path i.e. flash:/file_name For example # dir flash:/candidate_config.txt Directory of flash:/candidate_config.txt 33 -rw- 5592 Dec 18 2015 10:50:22 -08:00 candidate_config.txt return boolean """
cmd = 'dir {}'.format(cfg_file) success_pattern = 'Directory of {}'.format(cfg_file) output = self.device.send_command_expect(cmd) if 'Error opening' in output: return False elif success_pattern in output: return True return False
<SYSTEM_TASK:> Obtain the full interface name from the abbreviated name. <END_TASK> <USER_TASK:> Description: def _expand_interface_name(self, interface_brief): """ Obtain the full interface name from the abbreviated name. Cache mappings in self.interface_map. """
if self.interface_map.get(interface_brief): return self.interface_map.get(interface_brief) command = 'show int {}'.format(interface_brief) output = self._send_command(command) first_line = output.splitlines()[0] if 'line protocol' in first_line: full_int_name = first_line.split()[0] self.interface_map[interface_brief] = full_int_name return self.interface_map.get(interface_brief) else: return interface_brief
<SYSTEM_TASK:> IOS implementation of get_lldp_neighbors_detail. <END_TASK> <USER_TASK:> Description: def get_lldp_neighbors_detail(self, interface=''): """ IOS implementation of get_lldp_neighbors_detail. Calls get_lldp_neighbors. """
lldp = {} lldp_neighbors = self.get_lldp_neighbors() # Filter to specific interface if interface: lldp_data = lldp_neighbors.get(interface) if lldp_data: lldp_neighbors = {interface: lldp_data} else: lldp_neighbors = {} for interface in lldp_neighbors: local_port = interface lldp_fields = self._lldp_detail_parser(interface) # Convert any 'not advertised' to 'N/A' for field in lldp_fields: for i, value in enumerate(field): if 'not advertised' in value: field[i] = 'N/A' number_entries = len(lldp_fields[0]) # re.findall will return a list. Make sure same number of entries always returned. for test_list in lldp_fields: if len(test_list) != number_entries: raise ValueError("Failure processing show lldp neighbors detail") # Standardize the fields port_id, port_description, chassis_id, system_name, system_description, \ system_capabilities, enabled_capabilities, remote_address = lldp_fields standardized_fields = zip(port_id, port_description, chassis_id, system_name, system_description, system_capabilities, enabled_capabilities, remote_address) lldp.setdefault(local_port, []) for entry in standardized_fields: remote_port_id, remote_port_description, remote_chassis_id, remote_system_name, \ remote_system_description, remote_system_capab, remote_enabled_capab, \ remote_mgmt_address = entry lldp[local_port].append({ 'parent_interface': u'N/A', 'remote_port': remote_port_id, 'remote_port_description': remote_port_description, 'remote_chassis_id': remote_chassis_id, 'remote_system_name': remote_system_name, 'remote_system_description': remote_system_description, 'remote_system_capab': remote_system_capab, 'remote_system_enable_capab': remote_enabled_capab}) return lldp
<SYSTEM_TASK:> Convert string time to seconds. <END_TASK> <USER_TASK:> Description: def bgp_time_conversion(bgp_uptime): """ Convert string time to seconds. Examples 00:14:23 00:13:40 00:00:21 00:00:13 00:00:49 1d11h 1d17h 1w0d 8w5d 1y28w never """
bgp_uptime = bgp_uptime.strip()
uptime_letters = set(['w', 'h', 'd'])
if 'never' in bgp_uptime:
    return -1
elif ':' in bgp_uptime:
    times = bgp_uptime.split(":")
    times = [int(x) for x in times]
    hours, minutes, seconds = times
    return (hours * 3600) + (minutes * 60) + seconds
# Check if any letters 'w', 'h', 'd' are in the time string
elif uptime_letters & set(bgp_uptime):
    form1 = r'(\d+)d(\d+)h'  # 1d17h
    form2 = r'(\d+)w(\d+)d'  # 8w5d
    form3 = r'(\d+)y(\d+)w'  # 1y28w
    match = re.search(form1, bgp_uptime)
    if match:
        days = int(match.group(1))
        hours = int(match.group(2))
        return (days * DAY_SECONDS) + (hours * 3600)
    match = re.search(form2, bgp_uptime)
    if match:
        weeks = int(match.group(1))
        days = int(match.group(2))
        return (weeks * WEEK_SECONDS) + (days * DAY_SECONDS)
    match = re.search(form3, bgp_uptime)
    if match:
        years = int(match.group(1))
        weeks = int(match.group(2))
        return (years * YEAR_SECONDS) + (weeks * WEEK_SECONDS)
raise ValueError("Unexpected value for BGP uptime string: {}".format(bgp_uptime))
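Worked examples of the arithmetic, assuming the usual constant definitions (DAY_SECONDS = 86400, WEEK_SECONDS = 7 * 86400):

assert bgp_time_conversion('00:14:23') == 14 * 60 + 23              # 863
assert bgp_time_conversion('1d11h') == 1 * 86400 + 11 * 3600        # 126000
assert bgp_time_conversion('8w5d') == 8 * 604800 + 5 * 86400        # 5270400
assert bgp_time_conversion('never') == -1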
<SYSTEM_TASK:> Execute a list of commands and return the output in a dictionary format using the command <END_TASK> <USER_TASK:> Description: def cli(self, commands): """ Execute a list of commands and return the output in a dictionary format using the command as the key. Example input: ['show clock', 'show calendar'] Output example: { 'show calendar': u'22:02:01 UTC Thu Feb 18 2016', 'show clock': u'*22:01:51.165 UTC Thu Feb 18 2016'} """
cli_output = dict()
if type(commands) is not list:
    raise TypeError('Please enter a valid list of commands!')
for command in commands:
    output = self._send_command(command)
    if 'Invalid input detected' in output:
        raise ValueError('Unable to execute command "{}"'.format(command))
    cli_output.setdefault(command, {})
    cli_output[command] = output
return cli_output
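Hedged usage sketch, assuming the napalm driver factory and a reachable device (address and credentials are placeholders):

from napalm import get_network_driver

driver = get_network_driver('ios')
device = driver('192.0.2.1', 'admin', 'password')   # hypothetical device
device.open()
out = device.cli(['show clock', 'show version'])
print(out['show clock'])
device.close()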
<SYSTEM_TASK:> Implementation of get_config for IOS. <END_TASK> <USER_TASK:> Description: def get_config(self, retrieve='all'): """Implementation of get_config for IOS. Returns the startup or/and running configuration as dictionary. The keys of the dictionary represent the type of configuration (startup or running). The candidate is always empty string, since IOS does not support candidate configuration. """
configs = { 'startup': '', 'running': '', 'candidate': '', } if retrieve in ('startup', 'all'): command = 'show startup-config' output = self._send_command(command) configs['startup'] = output if retrieve in ('running', 'all'): command = 'show running-config' output = self._send_command(command) configs['running'] = output return configs
<SYSTEM_TASK:> Read the current value of the accelerometer and return it as a tuple <END_TASK> <USER_TASK:> Description: def read(self): """Read the current value of the accelerometer and return it as a tuple of signed 16-bit X, Y, Z axis values. """
raw = self._device.readList(ADXL345_REG_DATAX0, 6) return struct.unpack('<hhh', raw)
<SYSTEM_TASK:> Writes the value to the output pin specified. <END_TASK> <USER_TASK:> Description: def digital_write(pin_num, value, hardware_addr=0): """Writes the value to the output pin specified. .. note:: This function is for familiarity with users of other types of IO board. Consider accessing the ``output_pins`` attribute of a PiFaceDigital object: >>> pfd = PiFaceDigital(hardware_addr) >>> pfd.output_pins[pin_num].value = 1 :param pin_num: The pin number to write to. :type pin_num: int :param value: The value to write. :type value: int :param hardware_addr: The board to write to (default: 0) :type hardware_addr: int """
_get_pifacedigital(hardware_addr).output_pins[pin_num].value = value
<SYSTEM_TASK:> Writes the value to the input pullup specified. <END_TASK> <USER_TASK:> Description: def digital_write_pullup(pin_num, value, hardware_addr=0): """Writes the value to the input pullup specified. .. note:: This function is for familiarity with users of other types of IO board. Consider accessing the ``gppub`` attribute of a PiFaceDigital object: >>> pfd = PiFaceDigital(hardware_addr) >>> hex(pfd.gppub.value) 0xff >>> pfd.gppub.bits[pin_num].value = 1 :param pin_num: The pin number to write to. :type pin_num: int :param value: The value to write. :type value: int :param hardware_addr: The board to write to (default: 0) :type hardware_addr: int """
_get_pifacedigital(hardware_addr).gppub.bits[pin_num].value = value
<SYSTEM_TASK:> Returns this computer's IP address as a string. <END_TASK> <USER_TASK:> Description: def get_my_ip(): """Returns this computer's IP address as a string."""
ip = subprocess.check_output(GET_IP_CMD, shell=True).decode('utf-8')[:-1]
return ip.strip()
<SYSTEM_TASK:> Sets the output port value to new_value, defaults to old_value. <END_TASK> <USER_TASK:> Description: def set_output_port(self, new_value, old_value=0): """Sets the output port value to new_value, defaults to old_value."""
print("Setting output port to {}.".format(new_value)) port_value = old_value try: port_value = int(new_value) # dec except ValueError: port_value = int(new_value, 16) # hex finally: self.pifacedigital.output_port.value = port_value return port_value
<SYSTEM_TASK:> Wrap calls to the url with the given arguments. <END_TASK> <USER_TASK:> Description: def _request_api(self, **kwargs): """Wrap calls to the url with the given arguments. :param str url: Url to call with the given arguments :param str method: [POST | GET] Method to use on the request :param int status: Expected status code """
_url = kwargs.get('url') _method = kwargs.get('method', 'GET') _status = kwargs.get('status', 200) counter = 0 if _method not in ['GET', 'POST']: raise ValueError('Method is not GET or POST') while True: try: res = REQ[_method](_url, cookies=self._cookie) if res.status_code == _status: break else: raise BadStatusException(res.content) except requests.exceptions.BaseHTTPError: if counter < self._retries: counter += 1 continue raise MaxRetryError self._last_result = res return res
<SYSTEM_TASK:> Get info about a user based on his id. <END_TASK> <USER_TASK:> Description: def get_infos_with_id(self, uid): """Get info about a user based on his id. :return: JSON """
_logid = uid _user_info_url = USER_INFO_URL.format(logid=_logid) return self._request_api(url=_user_info_url).json()
<SYSTEM_TASK:> Get the current activities of user. <END_TASK> <USER_TASK:> Description: def get_current_activities(self, login=None, **kwargs): """Get the current activities of user. Either use the `login` param, or the client's login if unset. :return: JSON """
_login = kwargs.get( 'login', login or self._login ) _activity_url = ACTIVITY_URL.format(login=_login) return self._request_api(url=_activity_url).json()
<SYSTEM_TASK:> Get the current notifications of a user. <END_TASK> <USER_TASK:> Description: def get_notifications(self, login=None, **kwargs): """Get the current notifications of a user. :return: JSON """
_login = kwargs.get( 'login', login or self._login ) _notif_url = NOTIF_URL.format(login=_login) return self._request_api(url=_notif_url).json()
<SYSTEM_TASK:> Get a user's grades on a single promotion based on his login. <END_TASK> <USER_TASK:> Description: def get_grades(self, login=None, promotion=None, **kwargs): """Get a user's grades on a single promotion based on his login. Either use the `login` param, or the client's login if unset. :return: JSON """
_login = kwargs.get('login', login or self._login)
_promotion_id = kwargs.get('promotion', promotion)
_grades_url = GRADES_URL.format(login=_login, promo_id=_promotion_id)
return self._request_api(url=_grades_url).json()
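For illustration, assuming `client` is an authenticated instance of this wrapper (the login and promotion values are made up):

    # Either pass a login explicitly or fall back to the client's own login.
    grades = client.get_grades(login='jane.doe', promotion=2024)
    grades_self = client.get_grades(promotion=2024)  # uses self._login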
<SYSTEM_TASK:> Get the related activities of a project. <END_TASK> <USER_TASK:> Description: def get_activities_for_project(self, module=None, **kwargs): """Get the related activities of a project. :param str module: Stages of a given module :return: JSON """
_module_id = kwargs.get('module', module)
_activities_url = ACTIVITIES_URL.format(module_id=_module_id)
return self._request_api(url=_activities_url).json()
<SYSTEM_TASK:> Get groups for activity. <END_TASK> <USER_TASK:> Description: def get_group_for_activity(self, module=None, project=None, **kwargs): """Get groups for activity. :param str module: Base module :param str project: Project which contains the group requested :return: JSON """
_module_id = kwargs.get('module', module)
_project_id = kwargs.get('project', project)
_url = GROUPS_URL.format(module_id=_module_id, project_id=_project_id)
return self._request_api(url=_url).json()
<SYSTEM_TASK:> Get users by promotion id. <END_TASK> <USER_TASK:> Description: def get_students(self, **kwargs): """Get users by promotion id. :param int promotion: Promotion ID :return: JSON """
_promotion_id = kwargs.get('promotion')
_url = PROMOTION_URL.format(promo_id=_promotion_id)
return self._request_api(url=_url).json()
<SYSTEM_TASK:> Registers a collector <END_TASK> <USER_TASK:> Description: def register(self, collector): """ Registers a collector"""
if not isinstance(collector, Collector):
    raise TypeError(
        "Can't register instance, not a valid type of collector")
if collector.name in self.collectors:
    raise ValueError("Collector already exists or name collision")
with mutex:
    self.collectors[collector.name] = collector
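A minimal registration sketch, assuming a Counter collector whose constructor takes a name and a help string; the registry container and constructor signature are assumptions, not taken from this document:

    registry = Registry()  # hypothetical container exposing register() above
    requests_total = Counter('requests_total', 'Total HTTP requests served')
    registry.register(requests_total)
    # Registering another collector with the same name raises ValueError.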
<SYSTEM_TASK:> Sets a value in the container <END_TASK> <USER_TASK:> Description: def set_value(self, labels, value): """ Sets a value in the container"""
if labels:
    self._label_names_correct(labels)
with mutex:
    self.values[labels] = value
<SYSTEM_TASK:> Returns a list populated by tuples of 2 elements, first one is <END_TASK> <USER_TASK:> Description: def get_all(self): """ Returns a list populated by tuples of 2 elements, the first one is a dict with all the labels and the second element is the value of the metric itself """
with mutex:
    items = self.values.items()
result = []
for k, v in items:
    # Check whether it is a single-value dict (custom empty key)
    if not k or k == MetricDict.EMPTY_KEY:
        key = None
    else:
        key = decoder.decode(k)
    result.append((key, self.get(k)))
return result
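Consuming the pairs returned by get_all(), assuming `metric` is an instance of this container (the label names are illustrative):

    for labels, value in metric.get_all():
        # labels is None for the unlabeled sample, otherwise a dict
        # such as {'method': 'GET', 'code': '200'}.
        print(labels, value)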
<SYSTEM_TASK:> Adds a single observation to the summary. <END_TASK> <USER_TASK:> Description: def add(self, labels, value): """Adds a single observation to the summary."""
if type(value) not in (float, int):
    raise TypeError("Summary only works with digits (int, float)")
# We already have a lock for the data but not for the estimator
with mutex:
    try:
        e = self.get_value(labels)
    except KeyError:
        # Initialize quantile estimator
        e = quantile.Estimator(*self.__class__.DEFAULT_INVARIANTS)
        self.set_value(labels, e)
    e.observe(float(value))
<SYSTEM_TASK:> Gets the data in the form of the 0.5, 0.9 and 0.99 percentiles. Also <END_TASK> <USER_TASK:> Description: def get(self, labels): """ Gets the data in the form of the 0.5, 0.9 and 0.99 percentiles. Also returns the sum and count, all in a dict """
return_data = {}
# We already have a lock for the data but not for the estimator
with mutex:
    e = self.get_value(labels)
    # Set invariants data (defaults to 0.50, 0.90 and 0.99)
    for i in e._invariants:
        q = i._quantile
        return_data[q] = e.query(q)
    # Set sum and count
    return_data[self.__class__.SUM_KEY] = e._sum
    return_data[self.__class__.COUNT_KEY] = e._observations
return return_data
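Putting add() and get() together, a hedged sketch; the Summary constructor signature is assumed, not taken from this document:

    s = Summary('request_latency_seconds', 'Request latency')  # signature assumed
    for latency in (0.12, 0.31, 0.07, 0.25):
        s.add({'handler': '/api'}, latency)
    stats = s.get({'handler': '/api'})
    # stats maps the quantiles 0.5, 0.9 and 0.99 to their estimates and also
    # carries the sum (0.75) and count (4) under SUM_KEY and COUNT_KEY.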
<SYSTEM_TASK:> Returns the first child that matches the given name and <END_TASK> <USER_TASK:> Description: def get_child(self, name, attribs=None): """ Returns the first child that matches the given name and attributes. """
if name == '.':
    if attribs is None or len(attribs) == 0:
        return self
    if attribs == self.attribs:
        return self
return self.child_index.get(nodehash(name, attribs))
<SYSTEM_TASK:> Creates the given node, regardless of whether or not it already <END_TASK> <USER_TASK:> Description: def create(self, path, data=None): """ Creates the given node, regardless of whether or not it already exists. Returns the new node. """
node = self.current[-1]
path = self._splitpath(path)
n_items = len(path)
for n, item in enumerate(path):
    tag, attribs = self._splittag(item)
    # The leaf node is always newly created.
    if n == n_items - 1:
        node = node.add(Node(tag, attribs))
        break
    # Parent nodes are only created if they do not exist yet.
    existing = node.get_child(tag, attribs)
    if existing is not None:
        node = existing
    else:
        node = node.add(Node(tag, attribs))
if data:
    node.text = unquote(data)
return node
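A short sketch of path-based creation, assuming '/'-separated paths as _splitpath suggests (the builder instance, path and data are illustrative):

    # 'server' and 'interface' are reused if they already exist;
    # the leaf 'address' is always created anew.
    node = builder.create('server/interface/address', data='10.0.0.1')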
<SYSTEM_TASK:> Creates and enters the given node, regardless of whether it already <END_TASK> <USER_TASK:> Description: def open(self, path): """ Creates and enters the given node, regardless of whether it already exists. Returns the new node. """
self.current.append(self.create(path))
return self.current[-1]
<SYSTEM_TASK:> Enters the given node. Creates it if it does not exist. <END_TASK> <USER_TASK:> Description: def enter(self, path): """ Enters the given node. Creates it if it does not exist. Returns the node. """
self.current.append(self.add(path))
return self.current[-1]
<SYSTEM_TASK:> Get the tid of the best trial <END_TASK> <USER_TASK:> Description: def best_trial_tid(self, rank=0): """Get the tid of the best trial. rank=0 means the best model, rank=1 means the second best, and so on. """
candidates = [t for t in self.trials
              if t['result']['status'] == STATUS_OK]
if len(candidates) == 0:
    return None
losses = [float(t['result']['loss']) for t in candidates]
assert not np.any(np.isnan(losses))
lid = np.where(np.argsort(losses).argsort() == rank)[0][0]
return candidates[lid]["tid"]
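The double argsort is the rank trick: the first argsort gives the sort order and the second converts it into the rank of each element. A worked example of how it selects the entry with the requested rank:

    import numpy as np

    losses = np.array([0.9, 0.3, 0.5])
    ranks = np.argsort(losses).argsort()  # array([2, 0, 1]): rank of each loss
    lid = np.where(ranks == 0)[0][0]      # rank 0 (the best) lives at index 1
    assert losses[lid] == 0.3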
<SYSTEM_TASK:> Load the saved Keras model of the trial. <END_TASK> <USER_TASK:> Description: def load_model(self, tid, custom_objects=None): """Load the saved Keras model of the trial. If tid is None, the best model is loaded. Not applicable for trials run in cross-validation (i.e. not applicable when `CompileFN.cv_n_folds` is not None). """
if tid is None:
    tid = self.best_trial_tid()
model_path = self.get_trial(tid)["result"]["path"]["model"]
return load_model(model_path, custom_objects=custom_objects)
<SYSTEM_TASK:> Return a list of results with ok status <END_TASK> <USER_TASK:> Description: def get_ok_results(self, verbose=True): """Return a list of results with ok status """
if len(self.trials) == 0:
    return []
not_ok = np.where(np.array(self.statuses()) != "ok")[0]
if len(not_ok) > 0 and verbose:
    print("{0}/{1} trials were not ok.".format(len(not_ok), len(self.trials)))
    print("Trials: " + str(not_ok))
    print("Statuses: " + str(np.array(self.statuses())[not_ok]))
r = [merge_dicts({"tid": t["tid"]}, t["result"].to_dict())
     for t in self.trials if t["result"]["status"] == "ok"]
return r
<SYSTEM_TASK:> Add all authentication methods specified in the configuration. <END_TASK> <USER_TASK:> Description: def setup_authentication_methods(authn_config, template_env): """Add all authentication methods specified in the configuration."""
routing = {}
ac = AuthnBroker()
for authn_method in authn_config:
    cls = make_cls_from_name(authn_method["class"])
    instance = cls(template_env=template_env, **authn_method["kwargs"])
    ac.add(authn_method["acr"], instance)
    routing[instance.url_endpoint] = VerifierMiddleware(instance)
return ac, routing
<SYSTEM_TASK:> Set up the OpenID Connect Provider endpoints. <END_TASK> <USER_TASK:> Description: def setup_endpoints(provider): """Set up the OpenID Connect Provider endpoints."""
app_routing = {}
endpoints = [
    AuthorizationEndpoint(
        pyoidcMiddleware(provider.authorization_endpoint)),
    TokenEndpoint(
        pyoidcMiddleware(provider.token_endpoint)),
    UserinfoEndpoint(
        pyoidcMiddleware(provider.userinfo_endpoint)),
    RegistrationEndpoint(
        pyoidcMiddleware(provider.registration_endpoint)),
    EndSessionEndpoint(
        pyoidcMiddleware(provider.endsession_endpoint))
]
for ep in endpoints:
    app_routing["/{}".format(ep.etype)] = ep
return app_routing
<SYSTEM_TASK:> Converts a dictionary of keyword arguments into a tuple <END_TASK> <USER_TASK:> Description: def featuresQuery(self, **kwargs): """ Converts a dictionary of keyword arguments into a tuple of SQL select statements and the list of SQL arguments """
# TODO: Optimize by refactoring out string concatenation
sql = ""
sql_rows = "SELECT * FROM FEATURE WHERE id > 1 "
sql_args = ()
if 'name' in kwargs and kwargs['name']:
    sql += "AND name = ? "
    sql_args += (kwargs.get('name'),)
if 'geneSymbol' in kwargs and kwargs['geneSymbol']:
    sql += "AND gene_name = ? "
    sql_args += (kwargs.get('geneSymbol'),)
if 'start' in kwargs and kwargs['start'] is not None:
    sql += "AND end > ? "
    sql_args += (kwargs.get('start'),)
if 'end' in kwargs and kwargs['end'] is not None:
    sql += "AND start < ? "
    sql_args += (kwargs.get('end'),)
if 'referenceName' in kwargs and kwargs['referenceName']:
    sql += "AND reference_name = ?"
    sql_args += (kwargs.get('referenceName'),)
if 'parentId' in kwargs and kwargs['parentId']:
    sql += "AND parent_id = ? "
    sql_args += (kwargs['parentId'],)
if kwargs.get('featureTypes') is not None \
        and len(kwargs['featureTypes']) > 0:
    sql += "AND type IN ("
    sql += ", ".join(["?", ] * len(kwargs.get('featureTypes')))
    sql += ") "
    sql_args += tuple(kwargs.get('featureTypes'))
sql_rows += sql
sql_rows += " ORDER BY reference_name, start, end ASC "
return sql_rows, sql_args
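To make the string assembly concrete, tracing the method for one set of arguments gives the following (assuming `backend` is an instance of the class above):

    sql, args = backend.featuresQuery(referenceName='chr1', start=100, end=200)
    # sql  == "SELECT * FROM FEATURE WHERE id > 1 AND end > ? AND start < ? "
    #         "AND reference_name = ? ORDER BY reference_name, start, end ASC "
    # args == (100, 200, 'chr1')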
<SYSTEM_TASK:> Perform a full features query in the database. <END_TASK> <USER_TASK:> Description: def searchFeaturesInDb( self, startIndex=0, maxResults=None, referenceName=None, start=None, end=None, parentId=None, featureTypes=None, name=None, geneSymbol=None): """ Perform a full features query in the database. :param startIndex: int representing first record to return :param maxResults: int representing number of records to return :param referenceName: string representing reference name, ex 'chr1' :param start: int position on reference to start search :param end: int position on reference to end search >= start :param parentId: string restrict search by id of parent node. :param name: match features by name :param geneSymbol: match features by gene symbol :return: an array of dictionaries representing the returned data. """
# TODO: Refactor out common bits of this and the above count query.
sql, sql_args = self.featuresQuery(
    startIndex=startIndex, maxResults=maxResults,
    referenceName=referenceName, start=start, end=end,
    parentId=parentId, featureTypes=featureTypes,
    name=name, geneSymbol=geneSymbol)
sql += sqlite_backend.limitsSql(startIndex, maxResults)
query = self._dbconn.execute(sql, sql_args)
return sqlite_backend.sqliteRowsToDicts(query.fetchall())