code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class JsonSerializer(object): <NEW_LINE> <INDENT> __json_public__ = None <NEW_LINE> __json_hidden__ = None <NEW_LINE> __json_modifiers__ = None <NEW_LINE> def get_field_names(self): <NEW_LINE> <INDENT> for p in self.__mapper__.iterate_properties: <NEW_LINE> <INDENT> yield p.key <NEW_LINE> <DEDENT> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> field_names = self.get_field_names() <NEW_LINE> public = self.__json_public__ or field_names <NEW_LINE> hidden = self.__json_hidden__ or [] <NEW_LINE> modifiers = self.__json_modifiers__ or dict() <NEW_LINE> rv = dict() <NEW_LINE> for key in public: <NEW_LINE> <INDENT> rv[key] = getattr(self, key) <NEW_LINE> <DEDENT> for key, modifier in modifiers.items(): <NEW_LINE> <INDENT> value = getattr(self, key) <NEW_LINE> rv[key] = modifier(value, self) <NEW_LINE> <DEDENT> for key in hidden: <NEW_LINE> <INDENT> rv.pop(key, None) <NEW_LINE> return rv | A mixin that can be used to mark a SQLAlchemy model class which
implements a :func:`to_json` method. The :func:`to_json` method is used
in conjuction with the custom :class:`JSONEncoder` class. By default this
mixin will assume all properties of the SQLAlchemy model are to be visible
in the JSON output. Extend this class to customize which properties are
public, hidden or modified before being being passed to the JSON serializer. | 625990688e7ae83300eea83a |
class ConnectorCodecAdapter(Codec): <NEW_LINE> <INDENT> def __init__(self, codec): <NEW_LINE> <INDENT> self.codec = codec <NEW_LINE> <DEDENT> def encode(self, value): <NEW_LINE> <INDENT> return self.codec.encode(value) <NEW_LINE> <DEDENT> def decode(self, data, mask=None): <NEW_LINE> <INDENT> return self.codec.decode(data) | Removes the type parameter and forwards the calls to the
more basic codec types. | 6259906826068e7796d4e0e5 |
class SampleIngestionViewSet(mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = SampleIngestion.objects.all() <NEW_LINE> serializer_class = SampleIngestionSerializer <NEW_LINE> lookup_field = 'name_slug' <NEW_LINE> lookup_url_kwarg = 'name_and_gene_collection' | retrieve:
Return SampleIngestion data, use this endpoint to check the status of the ingestion process
list:
list SampleIngestion data.
delete:
Delete the Coverage data associated to a sample for a gene collection
create:
Trigger the ingestion of Coverage data, given a file a sample and a gene collection.
You will ingest coverage data for ONE sample and ONE gene collection,
these 2 fields, form a unique id, so you can not ingest the new data for
the same sample and same gene collection more than once before to delete
the first one. | 625990683eb6a72ae038be0d |
class StreamLimitMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, app, maximum_size=1024 * 1024 * 10): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.maximum_size = maximum_size <NEW_LINE> <DEDENT> def __call__(self, environ, start_response): <NEW_LINE> <INDENT> environ['wsgi.input'] = _SilentLimitedStream(environ, self.maximum_size) <NEW_LINE> return self.app(environ, start_response) | Limits the input stream to a given number of bytes. This is useful if
you have a WSGI application that reads form data into memory (django for
example) and you don't want users to harm the server by uploading tons of
data.
Default is 10MB | 62599068f548e778e596cd38 |
class PrettyStackTemplate(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._template = TEMPLATE_FOLDER.joinpath("console.jinja2") <NEW_LINE> self._cut_calling_code = None <NEW_LINE> self._only_the_exception = False <NEW_LINE> <DEDENT> def to_console(self): <NEW_LINE> <INDENT> new_template = copy(self) <NEW_LINE> new_template._template = TEMPLATE_FOLDER.joinpath("console.jinja2") <NEW_LINE> return new_template <NEW_LINE> <DEDENT> def cut_calling_code(self, filename): <NEW_LINE> <INDENT> if not Path(filename).exists(): <NEW_LINE> <INDENT> raise exceptions.StackTraceFilenameNotFound(filename) <NEW_LINE> <DEDENT> new_template = copy(self) <NEW_LINE> new_template._cut_calling_code = Path(filename).abspath() <NEW_LINE> return new_template <NEW_LINE> <DEDENT> def only_the_exception(self): <NEW_LINE> <INDENT> new_template = copy(self) <NEW_LINE> new_template._only_the_exception = True <NEW_LINE> return new_template <NEW_LINE> <DEDENT> def from_stacktrace_data(self, data): <NEW_LINE> <INDENT> tracebacks = [PrettyTraceback(traceback) for traceback in data['tracebacks']] <NEW_LINE> if self._cut_calling_code is not None: <NEW_LINE> <INDENT> updated_tracebacks = [] <NEW_LINE> start_including = False <NEW_LINE> for traceback in tracebacks: <NEW_LINE> <INDENT> if start_including and traceback.abspath != self._cut_calling_code: <NEW_LINE> <INDENT> updated_tracebacks.append(traceback) <NEW_LINE> <DEDENT> if traceback.abspath == self._cut_calling_code: <NEW_LINE> <INDENT> start_including = True <NEW_LINE> <DEDENT> <DEDENT> tracebacks = updated_tracebacks <NEW_LINE> <DEDENT> env = Environment() <NEW_LINE> env.loader = FileSystemLoader(str(self._template.dirname())) <NEW_LINE> tmpl = env.get_template(str(self._template.basename())) <NEW_LINE> return tmpl.render( stacktrace={ 'tracebacks': [traceback.to_dict() for traceback in tracebacks], 'exception': data['exception_string'], 'exception_type': data['exception_type'], 'docstring': data['docstring'], }, 
Fore=colorama.Fore, Back=colorama.Back, Style=colorama.Style, only_the_exception=self._only_the_exception, ) <NEW_LINE> <DEDENT> def current_stacktrace(self): <NEW_LINE> <INDENT> return self.from_stacktrace_data(utils.current_stack_trace_data()) | Template for generating pretty stacktraces on command. | 625990683317a56b869bf119 |
class Collections(enum.Enum): <NEW_LINE> <INDENT> PROJECTS = ( 'projects', 'projects/{projectsId}', {}, [u'projectsId'], True ) <NEW_LINE> PROJECTS_LOCATIONS = ( 'projects.locations', '{+name}', { '': 'projects/{projectsId}/locations/{locationsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_LOCATIONS_INSTANCES = ( 'projects.locations.instances', '{+name}', { '': 'projects/{projectsId}/locations/{locationsId}/instances/' '{instancesId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_LOCATIONS_OPERATIONS = ( 'projects.locations.operations', '{+name}', { '': 'projects/{projectsId}/locations/{locationsId}/operations/' '{operationsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_LOCATIONS_SNAPSHOTS = ( 'projects.locations.snapshots', '{+name}', { '': 'projects/{projectsId}/locations/{locationsId}/snapshots/' '{snapshotsId}', }, [u'name'], True ) <NEW_LINE> def __init__(self, collection_name, path, flat_paths, params, enable_uri_parsing): <NEW_LINE> <INDENT> self.collection_name = collection_name <NEW_LINE> self.path = path <NEW_LINE> self.flat_paths = flat_paths <NEW_LINE> self.params = params <NEW_LINE> self.enable_uri_parsing = enable_uri_parsing | Collections for all supported apis. | 625990680c0af96317c57935 |
class Album(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length = 80) <NEW_LINE> description = models.TextField(blank=False) <NEW_LINE> genres = [] <NEW_LINE> artist = models.CharField(max_length=50) <NEW_LINE> songs = [] <NEW_LINE> state = models.IntegerField() | DEFINES AN ALBUM | 62599068adb09d7d5dc0bd17 |
class PolyLR(WarmUpLRScheduler): <NEW_LINE> <INDENT> def __init__(self, optimizer, total_epoch, iteration_per_epoch, warmup_epochs=0, iteration_decay=True, power=0.9): <NEW_LINE> <INDENT> self.power = power <NEW_LINE> super(PolyLR, self).__init__(optimizer, total_epoch, iteration_per_epoch, warmup_epochs, iteration_decay) <NEW_LINE> <DEDENT> def get_lr(self, epoch, iter): <NEW_LINE> <INDENT> T = epoch * self.iteration_per_epoch + iter <NEW_LINE> return [base_lr * ((1 - 1.0 * T / self.total_iteration) ** self.power) for base_lr in self.base_lrs] | Sets the learning rate of each parameter group to the initial lr
multiply by (1 - iter / total_iter) ** gamma.
Args:
optimizer (Optimizer): Wrapped optimizer.
power (float): Multiplicative factor of learning rate decay.
Default: 0.1.
total_iter(int) : Total epoch
last_epoch (int): The index of last epoch. Default: -1. | 6259906823849d37ff852862 |
class River(Link): <NEW_LINE> <INDENT> type = "river" <NEW_LINE> _properties = {'flow':0} | A river which establishes a connection between two reservoirs or a
junction and a reservoir. No hydrological or hydraulic routing is
implemented. | 6259906871ff763f4b5e8f53 |
class ScanSummary(db.Model, WithConcurrentGetOrCreate): <NEW_LINE> <INDENT> job_id = db.CharField(unique=True, max_length=36) <NEW_LINE> current_checksum = db.CharField(max_length=32, blank=True, null=True) <NEW_LINE> previous_checksum = db.CharField(max_length=32, blank=True, null=True) <NEW_LINE> false_positive_checksum = db.CharField( blank=True, null=True, max_length=32, verbose_name="Ignored checksum", ) <NEW_LINE> changed = db.BooleanField(default=False, db_index=True) <NEW_LINE> created = db.DateTimeField(auto_now=False, auto_now_add=True) <NEW_LINE> modified = db.DateTimeField(auto_now=True, auto_now_add=True) <NEW_LINE> @property <NEW_LINE> def ipaddress(self): <NEW_LINE> <INDENT> ipaddresses = self.ipaddress_set.all() <NEW_LINE> if ipaddresses: <NEW_LINE> <INDENT> return ipaddresses[0] <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def device(self): <NEW_LINE> <INDENT> ipaddress = self.ipaddress <NEW_LINE> if ipaddress: <NEW_LINE> <INDENT> return self.ipaddress.device <NEW_LINE> <DEDENT> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.changed = all(( self.current_checksum, self.current_checksum != self.previous_checksum, self.current_checksum != self.false_positive_checksum, )) <NEW_LINE> return super(ScanSummary, self).save(*args, **kwargs) | For every IP address we prescanned we add scan summary record which holds
checksum and ignored checksum. When the user press `Ignore change` button
we store the current checksum as ignored one. | 625990687b25080760ed88b8 |
class Root(object): <NEW_LINE> <INDENT> __acl__ = [ [Allow, 'admin', ALL_PERMISSIONS], [Allow, 'unlock', 'unlock'], [Allow, Authenticated, 'default'], DENY_ALL, ] <NEW_LINE> def __init__(self, request): <NEW_LINE> <INDENT> self.request = request | Root context.
Defines ACL, not much else. | 625990686e29344779b01dff |
class DocumentClusterAPIView(apps.common.mixins.JqListAPIView, viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = DocumentCluster.objects.all() <NEW_LINE> http_method_names = ['get', 'patch', 'put'] <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> qs = super().get_queryset() <NEW_LINE> document_id = self.request.GET.get('document_id') <NEW_LINE> if document_id: <NEW_LINE> <INDENT> qs = qs.filter(documents_id=document_id) <NEW_LINE> <DEDENT> return qs.order_by('cluster_by', 'using', 'cluster_id') <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action in ('update', 'partial_update'): <NEW_LINE> <INDENT> return DocumentClusterUpdateSerializer <NEW_LINE> <DEDENT> return DocumentClusterSerializer | list: Document Cluster List
retrieve: Retrieve Document Cluster
update: Update Document Cluster (name)
partial_update: Partial Update Document Cluster (name) | 62599068be8e80087fbc0838 |
class RPCProcedureException(RPCFault): <NEW_LINE> <INDENT> def __init__(self, message=None, error_data=None): <NEW_LINE> <INDENT> RPCFault.__init__(self, PROCEDURE_EXCEPTION, message, error_data) | Procedure exception. (PROCEDURE_EXCEPTION) | 625990685fcc89381b266d2d |
class Photo(core_models.TimeStampedModel): <NEW_LINE> <INDENT> caption = models.CharField(max_length=80) <NEW_LINE> file = models.ImageField(upload_to="room_photos") <NEW_LINE> room = models.ForeignKey("Room", related_name="photos", on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.caption | Photo Model Definition | 625990688a43f66fc4bf393f |
class MarginRule(object): <NEW_LINE> <INDENT> def __init__(self, at_keyword, declarations, line, column): <NEW_LINE> <INDENT> self.at_keyword = at_keyword <NEW_LINE> self.declarations = declarations <NEW_LINE> self.line = line <NEW_LINE> self.column = column | A parsed at-rule for margin box.
.. attribute:: at_keyword
One of the 16 following strings:
* ``@top-left-corner``
* ``@top-left``
* ``@top-center``
* ``@top-right``
* ``@top-right-corner``
* ``@bottom-left-corner``
* ``@bottom-left``
* ``@bottom-center``
* ``@bottom-right``
* ``@bottom-right-corner``
* ``@left-top``
* ``@left-middle``
* ``@left-bottom``
* ``@right-top``
* ``@right-middle``
* ``@right-bottom``
.. attribute:: declarations
A list of :class:`~.css21.Declaration` objects.
.. attribute:: line
Source line where this was read.
.. attribute:: column
Source column where this was read. | 625990685fdd1c0f98e5f733 |
@configmapper.map("models", "two_layer_nn") <NEW_LINE> class TwoLayerNN(Module): <NEW_LINE> <INDENT> def __init__(self, embedding, dims): <NEW_LINE> <INDENT> super(TwoLayerNN, self).__init__() <NEW_LINE> self.embedding = embedding <NEW_LINE> self.linear1 = Linear(dims[0], dims[1]) <NEW_LINE> self.relu = ReLU() <NEW_LINE> self.linear2 = Linear(dims[1], dims[2]) <NEW_LINE> <DEDENT> def forward(self, x_input): <NEW_LINE> <INDENT> output = self.embedding(x_input) <NEW_LINE> output = self.linear1(output) <NEW_LINE> output = self.relu(output) <NEW_LINE> x_output = self.linear2(output) <NEW_LINE> return x_output | Implements two layer neural network.
Methods:
forward(x_input): Returns the output of the neural network. | 62599068009cb60464d02ce7 |
class UserError(TerraSyncPyException): <NEW_LINE> <INDENT> ExceptionShortDescription = "User error" | Exception raised when the program is used in an incorrect way. | 6259906863d6d428bbee3e60 |
class User(BaseModel): <NEW_LINE> <INDENT> email = "" <NEW_LINE> password = "" <NEW_LINE> first_name = "" <NEW_LINE> last_name = "" | class User - inherits from BaseModel - Public class attr - email,
password, first_name, last_name | 625990688e71fb1e983bd275 |
class BaseScript(object): <NEW_LINE> <INDENT> def __init__(self, script_obj): <NEW_LINE> <INDENT> if callable(script_obj): <NEW_LINE> <INDENT> self.callable_obj = script_obj <NEW_LINE> params = signature(script_obj).parameters <NEW_LINE> if len(params) > 1: <NEW_LINE> <INDENT> raise UserWarning('function to create base simulation must take a single argument (handle to FDTD CAD).') <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(script_obj, str): <NEW_LINE> <INDENT> if '.fsp' in script_obj and os.path.isfile(script_obj) or '.lms' in script_obj and os.path.isfile(script_obj): <NEW_LINE> <INDENT> self.project_file = os.path.abspath(script_obj) <NEW_LINE> <DEDENT> elif '.lsf' in script_obj and os.path.isfile(script_obj): <NEW_LINE> <INDENT> self.script_str = load_from_lsf(os.path.abspath(script_obj)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.script_str = str(script_obj) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise UserWarning('object for generating base simulation must be a Python function, a file name or a string with a Lumerical script.') <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, cad_handle): <NEW_LINE> <INDENT> return self.eval(cad_handle) <NEW_LINE> <DEDENT> def eval(self, cad_handle): <NEW_LINE> <INDENT> if not isinstance(cad_handle, FDTD) and not isinstance(cad_handle, MODE): <NEW_LINE> <INDENT> raise UserWarning('input must be handle returned by lumapi.FDTD.') <NEW_LINE> <DEDENT> if hasattr(self, 'callable_obj'): <NEW_LINE> <INDENT> return self.callable_obj(cad_handle) <NEW_LINE> <DEDENT> elif hasattr(self, 'project_file'): <NEW_LINE> <INDENT> return cad_handle.load(self.project_file) <NEW_LINE> <DEDENT> elif hasattr(self, 'script_str'): <NEW_LINE> <INDENT> return cad_handle.eval(self.script_str) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError('un-initialized object.') | Proxy class for creating a base simulation. It acts as an interface to place the appropriate call in the FDTD CAD
to build the base simulation depending on the input object. Options are:
1) a Python callable,
2) any visible *.fsp project file,
3) any visible *.lsf script file or
4) a plain string with a Lumerical script.
Parameters:
-----------
:script_obj: executable, file name or plain string. | 625990688da39b475be04999 |
class BaseExtraLogFormatter(logging.Formatter): <NEW_LINE> <INDENT> PREFIX = '_' <NEW_LINE> def _get_extra_attributes(self, record): <NEW_LINE> <INDENT> attributes = dict([(k, v) for k, v in six.iteritems(record.__dict__) if k.startswith(self.PREFIX)]) <NEW_LINE> return attributes <NEW_LINE> <DEDENT> def _get_common_extra_attributes(self, record): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for name in COMMON_ATTRIBUTE_NAMES: <NEW_LINE> <INDENT> value = getattr(record, name, None) <NEW_LINE> if not value: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> result[name] = value <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def _format_extra_attributes(self, attributes): <NEW_LINE> <INDENT> simple_types = (list, dict, int, float) + six.string_types <NEW_LINE> result = {} <NEW_LINE> for key, value in six.iteritems(attributes): <NEW_LINE> <INDENT> if isinstance(value, simple_types): <NEW_LINE> <INDENT> value = value <NEW_LINE> <DEDENT> elif isinstance(value, object): <NEW_LINE> <INDENT> value = serialize_object(obj=value) <NEW_LINE> <DEDENT> result[key] = value <NEW_LINE> <DEDENT> return result | Base class for the log formatters which expect additional context to be passed in the "extra"
dictionary.
For example:
extra={'_id': 'user-1', '_path': '/foo/bar'}
Note: To avoid clashes with standard Python log record attributes, all the keys in the extra
dictionary need to be prefixed with a slash ('_'). | 6259906826068e7796d4e0e7 |
class SingleDesignVisualization(BaseVisualization): <NEW_LINE> <INDENT> def __init__(self,main,only_focused = False): <NEW_LINE> <INDENT> self._watched_design = main.focusedDesign() <NEW_LINE> self._only_focused = only_focused <NEW_LINE> BaseVisualization.__init__(self,main) <NEW_LINE> <DEDENT> def update(self,design): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> self._update(self._watched_design) <NEW_LINE> <DEDENT> def _makeName(self): <NEW_LINE> <INDENT> if self._only_focused: <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> if not self._watched_design: <NEW_LINE> <INDENT> return 'close me' <NEW_LINE> <DEDENT> return self._watched_design.name + ' ' + self.title <NEW_LINE> <DEDENT> def _getDefaultName(self): <NEW_LINE> <INDENT> return self._makeName() <NEW_LINE> <DEDENT> def _update(self,design): <NEW_LINE> <INDENT> if not design or not design.valid(): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.update(design) <NEW_LINE> <DEDENT> <DEDENT> def designFocused(self,design,**kwargs): <NEW_LINE> <INDENT> if self._only_focused: <NEW_LINE> <INDENT> self._watched_design = design <NEW_LINE> self._update(design) <NEW_LINE> <DEDENT> <DEDENT> def designChanged(self,design,**kwargs): <NEW_LINE> <INDENT> if design == self._watched_design: <NEW_LINE> <INDENT> self._update(design) <NEW_LINE> <DEDENT> <DEDENT> def designRenamed(self,design,**kwargs): <NEW_LINE> <INDENT> if self._watched_design == design: <NEW_LINE> <INDENT> self.name = self._makeName() <NEW_LINE> <DEDENT> <DEDENT> def designSelected(self,design,**kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def designDeselected(self,design,**kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def designAdded(self,design,**kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def matchesActiveState(self,only_focused=False,**kwargs): <NEW_LINE> <INDENT> if 
self._only_focused: <NEW_LINE> <INDENT> return only_focused <NEW_LINE> <DEDENT> return self._watched_design == self.main.focusedDesign() | A convenient base class for Visualizations that only care about a single design.
This design can be "only_focused", which means the design will change to match the
focused design at all times. Otherwise, the design will be whatever is currently
focused at the time of creation and then never changed afterwards. | 6259906832920d7e50bc77f5 |
class TreeGraph: <NEW_LINE> <INDENT> def __init__(self, tree, features): <NEW_LINE> <INDENT> self.tree = tree <NEW_LINE> self.features = features <NEW_LINE> self.__graph = pydot.Dot(graph_type='graph', strict=False) <NEW_LINE> self.__graph_node_count = 0 <NEW_LINE> self.__build_tree() <NEW_LINE> <DEDENT> def __build_tree(self): <NEW_LINE> <INDENT> self.__create_nodes_and_edges(self.tree, self.features) <NEW_LINE> <DEDENT> def __create_nodes_and_edges(self, tree, features, parent_node=None, parent_branch=None): <NEW_LINE> <INDENT> for key, value in tree.items(): <NEW_LINE> <INDENT> if parent_node is None: <NEW_LINE> <INDENT> self.__graph_node_count += 1 <NEW_LINE> name = str(self.__graph_node_count) + str(key) <NEW_LINE> node = pydot.Node(name=name, label=key) <NEW_LINE> self.__graph.add_node(node) <NEW_LINE> self.__create_nodes_and_edges(value, features, parent_node=name) <NEW_LINE> <DEDENT> elif key in features: <NEW_LINE> <INDENT> self.__graph_node_count += 1 <NEW_LINE> name = str(self.__graph_node_count) + str(key) <NEW_LINE> node = pydot.Node(name=name, label=key) <NEW_LINE> self.__graph.add_node(node) <NEW_LINE> edge = pydot.Edge(parent_node, name, label=parent_branch) <NEW_LINE> self.__graph.add_edge(edge) <NEW_LINE> self.__create_nodes_and_edges(value, features, parent_node=name) <NEW_LINE> <DEDENT> elif not isinstance(value, dict): <NEW_LINE> <INDENT> self.__graph_node_count += 1 <NEW_LINE> name = str(self.__graph_node_count) + str(value) <NEW_LINE> node = pydot.Node(name, label=value) <NEW_LINE> self.__graph.add_node(node) <NEW_LINE> edge = pydot.Edge(parent_node, name, label=key) <NEW_LINE> self.__graph.add_edge(edge) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__create_nodes_and_edges(value, features, parent_node=parent_node, parent_branch=key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def export_image(self, file_name="Tree", file_format='png'): <NEW_LINE> <INDENT> if file_format == 'png': <NEW_LINE> <INDENT> 
self.__graph.write_png("{}.png".format(file_name)) | An tool for creating and exporting graphs from previously built (decision) trees.
Attributes:
tree: A dictionary representing a tree.
features: A list with all features in the tree. | 625990688e7ae83300eea83d |
class AiRecognitionTaskInput(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Definition = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Definition = params.get("Definition") | AI 视频内容识别输入参数类型
| 62599068d6c5a102081e38d6 |
class ChartCLI(object): <NEW_LINE> <INDENT> def __init__(self, parser=None, args=None): <NEW_LINE> <INDENT> self.parser = argparse.ArgumentParser(**{ 'prog': 'Nielsen Chart Exercise', 'description': 'Joshua Powell\'s Technical CSV to Chart exercise.' }) <NEW_LINE> self.parser.add_argument('--file', **{ 'type': str, 'help': 'This can be a relative path `data/sample1.csv`' ' or it can be an asolute path `/[DIR]/1.csv`' }) <NEW_LINE> self.parser.add_argument('--show_values', **{ 'type': utilities.boolean_string, 'help': 'Display bar values, defaults to True', 'default': True }) <NEW_LINE> self.args = self.parser.parse_args() | Command Line Interface.
Setup named application arguments and command line interface help
information.
:param (class) self
The representation of the instantiated Class Instance
:param (class) parser
The name of the application
:param (class) args
The name of the enviornment in which to load the application | 62599068f548e778e596cd3a |
class AccessMethod(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = _('Access Methods') <NEW_LINE> <DEDENT> name = models.CharField(max_length=64) <NEW_LINE> created_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> modified_time = models.DateTimeField(auto_now=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.name}' | Represents an access method. | 6259906897e22403b383c6bc |
class SignOutHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.remove_from_session() <NEW_LINE> self.redirect(self.uri_for('home')) | handles signing out | 6259906899cbb53fe6832694 |
class ActionDescription(object): <NEW_LINE> <INDENT> COPY = "copy" <NEW_LINE> ARCHIVING = "archiving" <NEW_LINE> COMMENT = "content-comment" <NEW_LINE> CREATION = "creation" <NEW_LINE> DELETION = "deletion" <NEW_LINE> EDITION = "edition" <NEW_LINE> REVISION = "revision" <NEW_LINE> STATUS_UPDATE = "status-update" <NEW_LINE> UNARCHIVING = "unarchiving" <NEW_LINE> UNDELETION = "undeletion" <NEW_LINE> MOVE = "move" <NEW_LINE> _ICONS = { "archiving": "fas fa-archive", "content-comment": "far fa-comment", "creation": "fas fa-magic", "deletion": "far trash-alt", "edition": "fas fa-edit", "revision": "fas fa-history", "status-update": "fas fa-random", "unarchiving": "far file-archive", "undeletion": "far fa-trash-alt", "move": "fas fa-arrows-alt", "copy": "far fa-copy", } <NEW_LINE> def __init__(self, id): <NEW_LINE> <INDENT> assert id in ActionDescription.allowed_values() <NEW_LINE> self.id = id <NEW_LINE> self.label = self.id <NEW_LINE> self.fa_icon = ActionDescription._ICONS[id] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def allowed_values(cls): <NEW_LINE> <INDENT> return [ cls.ARCHIVING, cls.COMMENT, cls.CREATION, cls.DELETION, cls.EDITION, cls.REVISION, cls.STATUS_UPDATE, cls.UNARCHIVING, cls.UNDELETION, cls.MOVE, cls.COPY, ] | Allowed status are:
- open
- closed-validated
- closed-invalidated
- closed-deprecated | 62599068435de62698e9d5b9 |
class JenkinsAPIException(Exception): <NEW_LINE> <INDENT> pass | Base class for all errors
| 62599068627d3e7fe0e08638 |
class CachedS3StaticStorage(CachedS3BotoStorage): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs["location"] = "static" <NEW_LINE> super(CachedS3StaticStorage, self).__init__(*args, **kwargs) | Mix of the :class:`S3MediaStorage` and :class:`CachedS3BotoStorage`,
saves files in ``/static`` subdirectory | 6259906844b2445a339b7537 |
class IxnBgpV6L3VpnRoutePropertyEmulation(IxnEmulationHost): <NEW_LINE> <INDENT> def __init__(self, ixnhttp): <NEW_LINE> <INDENT> super(IxnBgpV6L3VpnRoutePropertyEmulation, self).__init__(ixnhttp) <NEW_LINE> <DEDENT> def find(self, vport_name=None, emulation_host=None, **filters): <NEW_LINE> <INDENT> return super(IxnBgpV6L3VpnRoutePropertyEmulation, self).find(["topology","deviceGroup","networkGroup","macPools","ipv6PrefixPools","bgpV6L3VpnRouteProperty"], vport_name, emulation_host, filters) | Generated NGPF bgpV6L3VpnRouteProperty emulation host | 6259906871ff763f4b5e8f55 |
class MemberToGooduser(models.Model): <NEW_LINE> <INDENT> LOW = 'LOW' <NEW_LINE> HIGH = 'HIGH' <NEW_LINE> LEVEL_CHOICES = ( (LOW, 'Low'), (HIGH, 'High') ) <NEW_LINE> member = models.ForeignKey(Member) <NEW_LINE> gooduser = models.ForeignKey(GoodUser) <NEW_LINE> follow_level = models.CharField(max_length=20, null=True, blank=True, choices=LEVEL_CHOICES, default=LOW) <NEW_LINE> rating = models.IntegerField(null=True, blank=True) <NEW_LINE> favorite = models.BooleanField(null=False, blank=False, default=False) | Additional ManyToMany fields for relation of Member and Gooduser | 625990682ae34c7f260ac897 |
class _ProtocolWrapper(protocol.ProcessProtocol): <NEW_LINE> <INDENT> def __init__(self, proto): <NEW_LINE> <INDENT> self.proto = proto <NEW_LINE> <DEDENT> def connectionMade(self): self.proto.connectionMade() <NEW_LINE> def outReceived(self, data): self.proto.dataReceived(data) <NEW_LINE> def processEnded(self, reason): self.proto.connectionLost(reason) | This class wraps a L{Protocol} instance in a L{ProcessProtocol} instance. | 625990681b99ca400229010d |
class DatasetNotFoundError(FusekiClientError): <NEW_LINE> <INDENT> pass | Dataset not found error. | 6259906899cbb53fe6832695 |
class SystemTestSuite(NoseTestSuite): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(SystemTestSuite, self).__init__(*args, **kwargs) <NEW_LINE> self.test_id = kwargs.get('test_id', self._default_test_id) <NEW_LINE> self.fasttest = kwargs.get('fasttest', False) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> super(SystemTestSuite, self).__enter__() <NEW_LINE> <DEDENT> @property <NEW_LINE> def cmd(self): <NEW_LINE> <INDENT> cmd = ( './manage.py {system} test --verbosity={verbosity} ' '{test_id} {test_opts} --settings=test {extra} ' '--with-xunit --xunit-file={xunit_report}'.format( system=self.root, verbosity=self.verbosity, test_id=self.test_id, test_opts=self.test_options_flags, extra=self.extra_args, xunit_report=self.report_dir / "nosetests.xml", ) ) <NEW_LINE> return self._under_coverage_cmd(cmd) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _default_test_id(self): <NEW_LINE> <INDENT> default_test_id = ( "{system}/djangoapps/*" " common/djangoapps/*" " openedx/core/djangoapps/*" " openedx/tests/*" " openedx/core/lib/*" ) <NEW_LINE> if self.root in ('lms', 'cms'): <NEW_LINE> <INDENT> default_test_id += " {system}/lib/*" <NEW_LINE> <DEDENT> if self.root == 'lms': <NEW_LINE> <INDENT> default_test_id += " {system}/tests.py" <NEW_LINE> default_test_id += " openedx/core/djangolib" <NEW_LINE> <DEDENT> if self.root == 'cms': <NEW_LINE> <INDENT> default_test_id += " {system}/tests/*" <NEW_LINE> <DEDENT> return default_test_id.format(system=self.root) | TestSuite for lms and cms nosetests | 6259906856ac1b37e63038ba |
class Link( object ): <NEW_LINE> <INDENT> def __init__( self, node1, node2, port1=None, port2=None, intfName1=None, intfName2=None, addr1=None, addr2=None, intf=Intf, cls1=None, cls2=None, params1=None, params2=None, fast=True ): <NEW_LINE> <INDENT> if params1 is None: <NEW_LINE> <INDENT> params1 = {} <NEW_LINE> <DEDENT> if params2 is None: <NEW_LINE> <INDENT> params2 = {} <NEW_LINE> <DEDENT> if params2 is params1: <NEW_LINE> <INDENT> params2 = dict( params1 ) <NEW_LINE> <DEDENT> if port1 is not None: <NEW_LINE> <INDENT> params1[ 'port' ] = port1 <NEW_LINE> <DEDENT> if port2 is not None: <NEW_LINE> <INDENT> params2[ 'port' ] = port2 <NEW_LINE> <DEDENT> if 'port' not in params1: <NEW_LINE> <INDENT> params1[ 'port' ] = node1.newPort() <NEW_LINE> <DEDENT> if 'port' not in params2: <NEW_LINE> <INDENT> params2[ 'port' ] = node2.newPort() <NEW_LINE> <DEDENT> if not intfName1: <NEW_LINE> <INDENT> intfName1 = self.intfName( node1, params1[ 'port' ] ) <NEW_LINE> <DEDENT> if not intfName2: <NEW_LINE> <INDENT> intfName2 = self.intfName( node2, params2[ 'port' ] ) <NEW_LINE> <DEDENT> self.fast = fast <NEW_LINE> if fast: <NEW_LINE> <INDENT> params1.setdefault( 'moveIntfFn', self._ignore ) <NEW_LINE> params2.setdefault( 'moveIntfFn', self._ignore ) <NEW_LINE> self.makeIntfPair( intfName1, intfName2, addr1, addr2, node1, node2, deleteIntfs=False ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.makeIntfPair( intfName1, intfName2, addr1, addr2 ) <NEW_LINE> <DEDENT> if not cls1: <NEW_LINE> <INDENT> cls1 = intf <NEW_LINE> <DEDENT> if not cls2: <NEW_LINE> <INDENT> cls2 = intf <NEW_LINE> <DEDENT> intf1 = cls1( name=intfName1, node=node1, link=self, mac=addr1, **params1 ) <NEW_LINE> intf2 = cls2( name=intfName2, node=node2, link=self, mac=addr2, **params2 ) <NEW_LINE> self.intf1, self.intf2 = intf1, intf2 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _ignore( *args, **kwargs ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def intfName( self, node, n ): <NEW_LINE> <INDENT> assert 
self <NEW_LINE> return node.name + '-eth' + repr( n ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def makeIntfPair( cls, intfname1, intfname2, addr1=None, addr2=None, node1=None, node2=None, deleteIntfs=True ): <NEW_LINE> <INDENT> assert cls <NEW_LINE> return makeIntfPair( intfname1, intfname2, addr1, addr2, node1, node2, deleteIntfs=deleteIntfs ) <NEW_LINE> <DEDENT> def delete( self ): <NEW_LINE> <INDENT> self.intf1.delete() <NEW_LINE> <DEDENT> def stop( self ): <NEW_LINE> <INDENT> self.delete() <NEW_LINE> <DEDENT> def status( self ): <NEW_LINE> <INDENT> return "(%s %s)" % ( self.intf1.status(), self.intf2.status() ) <NEW_LINE> <DEDENT> def __str__( self ): <NEW_LINE> <INDENT> return '%s<->%s' % ( self.intf1, self.intf2 ) | A basic link is just a veth pair.
Other types of links could be tunnels, link emulators, etc.. | 62599068aad79263cf42ff66 |
class LogMessage(structs.RDFProtoStruct): <NEW_LINE> <INDENT> protobuf = jobs_pb2.PrintStr | A log message sent from the client to the server. | 625990683539df3088ecda4f |
class ReporttionUserModel(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(UserProFile, verbose_name='举报用户') <NEW_LINE> activity = models.ForeignKey(ActivityModel, verbose_name='举报活动') <NEW_LINE> contion = models.TextField(verbose_name='举报理由') <NEW_LINE> addtime = models.DateTimeField(default=datetime.now, verbose_name='举报时间') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = '用户举报记录' <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.user.name | 用户活动举报记录 | 62599068b7558d5895464b08 |
class HttpProblem(Serializable): <NEW_LINE> <INDENT> type: str = "about:blank" <NEW_LINE> title: HttpProblemTitle = None <NEW_LINE> status: int = None <NEW_LINE> detail: str = None <NEW_LINE> instance: str = None <NEW_LINE> def __post_init__(self): <NEW_LINE> <INDENT> self.Schema._hooks.update({('post_dump', False): ['remove_none_values']}) <NEW_LINE> setattr(self.Schema, "remove_none_values", self.remove_none_values) <NEW_LINE> <DEDENT> @post_dump <NEW_LINE> def remove_none_values(self, data, **kwargs): <NEW_LINE> <INDENT> return { key: value for key, value in data.items() if value is not None } | Store the reasons for failures of the HTTP layers for the API.
The reason is stored as an RFC 7807 Problem. It is a way to define
uniform, machine-readable details of errors in an HTTP response.
See https://tools.ietf.org/html/rfc7807 for details.
Attributes:
type (str): A URI reference that identifies the
problem type. It should point the Krake API users to the
concrete part of the Krake documentation where the problem
type is explained in detail. Defaults to about:blank.
title (HttpProblemTitle): A short, human-readable summary of
the problem type
status (int): The HTTP status code
detail (str): A human-readable explanation of the problem
instance (str): A URI reference that identifies the specific
occurrence of the problem | 625990688e7ae83300eea83e |
class HistogramsByPileUpCollection(BaseHistCollection): <NEW_LINE> <INDENT> def __init__(self, pileupBins, dimensions=1, initialValue=0): <NEW_LINE> <INDENT> from rootpy.plotting import Hist <NEW_LINE> BaseHistCollection.__init__(self, dimensions, initialValue) <NEW_LINE> self._pileupBins = pileupBins <NEW_LINE> self._pileupHist = Hist(100, 0, 100, name='nVertex') <NEW_LINE> <DEDENT> def add(self, hist_name, bins=[]): <NEW_LINE> <INDENT> from rootpy.plotting import Hist <NEW_LINE> bins = np.array(bins) <NEW_LINE> if bins.size == 0: <NEW_LINE> <INDENT> logger.error( 'No bins specified for histogram {0}'.format(hist_name)) <NEW_LINE> <DEDENT> if hist_name in self[self._pileupBins[0]].keys(): <NEW_LINE> <INDENT> logger.warn('Histogram {0} already exists!'.format(hist_name)) <NEW_LINE> return <NEW_LINE> <DEDENT> hist_names = [] <NEW_LINE> add_name = hist_names.append <NEW_LINE> for puBinLower, puBinUpper in pairwise(self._pileupBins): <NEW_LINE> <INDENT> name = '{0}_pu{1}To{2}'.format( hist_name, puBinLower, puBinUpper) <NEW_LINE> if not self[puBinLower] or not self[puBinLower][hist_name]: <NEW_LINE> <INDENT> add_name(name) <NEW_LINE> self[puBinLower][hist_name] = Hist(bins, name=name) <NEW_LINE> <DEDENT> <DEDENT> logger.debug('Created {0} histograms: {1}'.format( len(hist_names), ', '.join(hist_names))) <NEW_LINE> <DEDENT> def set_pileup(self, pileUp): <NEW_LINE> <INDENT> self._pileUp = pileUp <NEW_LINE> self._pileupHist.fill(pileUp) <NEW_LINE> <DEDENT> def fill(self, hist_name, x, w=1.0): <NEW_LINE> <INDENT> h = self[self._pileUp][hist_name] <NEW_LINE> if not h: <NEW_LINE> <INDENT> msg = 'Histogram {0} does not exist'.format(hist_name) <NEW_LINE> logger.error(msg) <NEW_LINE> raise NameError(msg) <NEW_LINE> <DEDENT> h.fill(x, w) <NEW_LINE> <DEDENT> def _get_pu_bin(self, pileup): <NEW_LINE> <INDENT> if pileup > max(self._pileupBins): <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> bins = pairwise(self._pileupBins) <NEW_LINE> for i, (lowerEdge, upperEdge) in 
enumerate(bins): <NEW_LINE> <INDENT> if pileup >= lowerEdge and pileup < upperEdge: <NEW_LINE> <INDENT> return i <NEW_LINE> <DEDENT> <DEDENT> return 0 <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> real_key = self._get_pu_bin(key) <NEW_LINE> return defaultdict.__getitem__(self, real_key) <NEW_LINE> <DEDENT> def summarise(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def to_root(self, output_file): <NEW_LINE> <INDENT> to_root([self, self._pileupHist], output_file) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_root(input_file): <NEW_LINE> <INDENT> from rootpy.io.pickler import load <NEW_LINE> instance, pileupHist = load(input_file) <NEW_LINE> instance._pileupHist = pileupHist <NEW_LINE> return instance | Specialisation of BaseHistCollection to bin histograms by pileup
:Example:
>>> hists = HistogramsByPileUp(pileupBins=[0,10,15,20,30,999])
>>> pileup=11
>>> # translates pileup=11 to 2nd pileup bin
>>> hists[pileup] = Hist(bins=np.arange(-1, 1.5, 0.05)) | 625990681f5feb6acb16439e |
class ResendInviteView(APIView): <NEW_LINE> <INDENT> permission_classes = [IsAuthenticated & IsInviteOwner] <NEW_LINE> def post(self, request, format=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> invited_member_obj = InvitedMembers.objects.get(pk=request.query_params['invite_id']) <NEW_LINE> <DEDENT> except InvitedMembers.DoesNotExist: <NEW_LINE> <INDENT> return Response({ "error" : "Invite was never sent to the provided email.", "status" : status.HTTP_404_NOT_FOUND }) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> user_existance = User.objects.get(email=request.data['email']) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> encoded_jwt = jwt.encode({ 'company_name':request.user.company.company_name, 'email': request.data['email'], 'exp' : time.time() + 10080}, SECRET_KEY, algorithm='HS256').decode('utf-8') <NEW_LINE> request.data['token'] = encoded_jwt <NEW_LINE> serialized_data = InvitedMemberSerializer(invited_member_obj, data=request.data) <NEW_LINE> if serialized_data.is_valid(): <NEW_LINE> <INDENT> invite_instance = serialized_data.save() <NEW_LINE> return Response({ "message":"Invite sent again.", "status" : status.HTTP_200_OK }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response(serialized_data.errors) <NEW_LINE> <DEDENT> <DEDENT> if not user_existance is None: <NEW_LINE> <INDENT> return Response({ "error" : "User already associated with this email address.", "status" : status.HTTP_400_BAD_REQUEST }) | This view will regenerate token, update it into the InvitedMembers model and resend member invite mail.
This view requires a `query_params` as `invite_id` and `email` in JSON body as shown below -
{{host}}/api/users/resend-invite/?invite_id=17
Sample input data -
{
"email":""
} | 62599068a8370b77170f1b75 |
@zope.interface.provider(interfaces.IPluginFactory) <NEW_LINE> class DarwinConfigurator(configurator.ApacheConfigurator): <NEW_LINE> <INDENT> OS_DEFAULTS = dict( server_root="/etc/apache2", vhost_root="/etc/apache2/other", vhost_files="*.conf", logs_root="/var/log/apache2", ctl="apachectl", version_cmd=['apachectl', '-v'], restart_cmd=['apachectl', 'graceful'], conftest_cmd=['apachectl', 'configtest'], enmod=None, dismod=None, le_vhost_ext="-le-ssl.conf", handle_modules=False, handle_sites=False, challenge_location="/etc/apache2/other", MOD_SSL_CONF_SRC=pkg_resources.resource_filename( "certbot_apache", os.path.join("_internal", "options-ssl-apache.conf")) ) | macOS specific ApacheConfigurator override class | 62599068fff4ab517ebcefcc |
class Transition(object): <NEW_LINE> <INDENT> def __init__(self, from_state, to_state, rate, name=None, swap_properties=False, prop_update_fn=None): <NEW_LINE> <INDENT> self.from_state = from_state <NEW_LINE> self.to_state = to_state <NEW_LINE> self.rate = rate <NEW_LINE> self.name = name <NEW_LINE> self.swap_properties = swap_properties <NEW_LINE> self.prop_update_fn = prop_update_fn | A transition from one state to another.
Represents a transition from one state ("from_state") to another
("to_state") at a link. The transition probability is represented by a rate
parameter "rate", with dimensions of 1/T. The probability distribution of
time until the transition event occurs is exponential with mean 1/rate.
The optional name parameter allows the caller to assign a name to any given
transition.
Note that from_state and to_state can now be either integer IDs for the
standardised ordering of the link states (as before), or tuples explicitly
describing the node state at each end, and the orientation.
Orientation is 0: horizontal, L-R; 1: vertical, bottom-top.
For such a tuple, order is (left/bottom, right/top, orientation).
Transition() constructor sets 3 required properties and 3 optional
properties for a transition from one cell pair to another.
Parameters
----------
from_state : int
Code for the starting state of the cell pair (link)
to_state : int
Code for the new state of the cell pair (link)
rate : float
Average rate at which this transition occurs (dimension of 1/time)
name : string (optional)
Name for this transition
swap_properties : bool (optional)
Flag: should properties be exchanged between the two cells? | 6259906897e22403b383c6be |
class DirectExchange(ExchangeType): <NEW_LINE> <INDENT> type = 'direct' <NEW_LINE> def lookup(self, table, exchange, routing_key, default): <NEW_LINE> <INDENT> return { queue for rkey, _, queue in table if rkey == routing_key } <NEW_LINE> <DEDENT> def deliver(self, message, exchange, routing_key, **kwargs): <NEW_LINE> <INDENT> _lookup = self.channel._lookup <NEW_LINE> _put = self.channel._put <NEW_LINE> for queue in _lookup(exchange, routing_key): <NEW_LINE> <INDENT> _put(queue, message, **kwargs) | The `direct` exchange routes based on exact routing keys. | 6259906856b00c62f0fb407f |
class EpsilonGreedyPolicy(ActionValuePolicy): <NEW_LINE> <INDENT> def __init__(self, nb_bandits: int, debug: bool, initial_q_value=0.0, epsilon=0.1): <NEW_LINE> <INDENT> super().__init__(nb_bandits, debug, initial_q_value) <NEW_LINE> self.epsilon: float = epsilon <NEW_LINE> <DEDENT> def select_action(self) -> int: <NEW_LINE> <INDENT> if np.random.uniform() > self.epsilon: <NEW_LINE> <INDENT> bandit_idx = np.argmax(self.q) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bandit_idx = np.random.randint(0, self.nb_bandits - 1) <NEW_LINE> <DEDENT> return int(bandit_idx) <NEW_LINE> <DEDENT> def update(self, bandit: int, reward: float) -> None: <NEW_LINE> <INDENT> self.visits[bandit] += 1 <NEW_LINE> q_value = self.q[bandit] + (1 / self.visits[bandit]) * (reward - self.q[bandit]) <NEW_LINE> self.q[bandit] = q_value | Select an action greedily with epsilon chance of a random action. | 62599068cb5e8a47e493cd5c |
class Evolvable(Named): <NEW_LINE> <INDENT> def mutate(self, **args): <NEW_LINE> <INDENT> abstractMethod() <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return copy.deepcopy(self) <NEW_LINE> <DEDENT> def randomize(self): <NEW_LINE> <INDENT> abstractMethod() <NEW_LINE> <DEDENT> def newSimilarInstance(self): <NEW_LINE> <INDENT> res = self.copy() <NEW_LINE> res.randomize() <NEW_LINE> return res | The interface for all Evolvables, i.e. which implement mutation, randomize and copy operators. | 6259906899fddb7c1ca639a8 |
class loop7(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.loops7" <NEW_LINE> bl_label = "origin to selected / in objectmode" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> bpy.ops.view3d.snap_cursor_to_selected() <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.object.origin_set(type='ORIGIN_CURSOR') <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> return {'FINISHED'} | set origin to selected / objectmode | 62599068dd821e528d6da55a |
class WebAPIAuthBackend(object): <NEW_LINE> <INDENT> www_auth_scheme = None <NEW_LINE> SENSITIVE_CREDENTIALS_RE = re.compile('api|token|key|secret|password|signature', re.I) <NEW_LINE> def get_auth_headers(self, request): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def authenticate(self, request): <NEW_LINE> <INDENT> credentials = self.get_credentials(request) <NEW_LINE> if not credentials: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(credentials, dict): <NEW_LINE> <INDENT> result = self.login_with_credentials(request, **credentials) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert isinstance(credentials, tuple) <NEW_LINE> result = credentials <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def get_credentials(self, request): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def login_with_credentials(self, request, **credentials): <NEW_LINE> <INDENT> result = self.validate_credentials(request, **credentials) <NEW_LINE> if result is not None: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> log_extra = { 'request': request, } <NEW_LINE> cleaned_credentials = self.clean_credentials_for_display(credentials) <NEW_LINE> logging.debug( 'Attempting authentication on API: %s', ', '.join([ '%s=%s' % pair for pair in six.iteritems(cleaned_credentials) ]), extra=log_extra) <NEW_LINE> user = auth.authenticate(**credentials) <NEW_LINE> if user and user.is_active: <NEW_LINE> <INDENT> auth.login(request, user) <NEW_LINE> return True, None, None <NEW_LINE> <DEDENT> logging.debug('API Login failed. 
No valid user found.', extra=log_extra) <NEW_LINE> auth.logout(request) <NEW_LINE> return False, None, None <NEW_LINE> <DEDENT> def validate_credentials(self, request, **credentials): <NEW_LINE> <INDENT> if (request.user.is_authenticated() and request.user.username == credentials.get('username')): <NEW_LINE> <INDENT> return True, None, None <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def clean_credentials_for_display(self, credentials): <NEW_LINE> <INDENT> clean_credentials = {} <NEW_LINE> for key, value in six.iteritems(credentials): <NEW_LINE> <INDENT> if self.SENSITIVE_CREDENTIALS_RE.search(key): <NEW_LINE> <INDENT> clean_credentials[key] = '************' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> clean_credentials[key] = value <NEW_LINE> <DEDENT> <DEDENT> return clean_credentials | Handles a form of authentication for the web API.
This can be overridden to provide custom forms of authentication, or to
support multiple types of authentication.
More than one authentication backend can be used with the web API. In that
case, the client can make the determination about which to use.
Auth backends generally need to only override the
:py:meth:`get_credentials` method, though more specialized ones may
override other methods as well.
They must also provide :py:attr:`www_auth_scheme` which is a
``WWW-Authenticate`` scheme value. | 62599068be8e80087fbc083c |
class CapturePointConstraint(UnilateralConstraint, JointVelocityConstraint): <NEW_LINE> <INDENT> def __init__(self, model): <NEW_LINE> <INDENT> super(CapturePointConstraint, self).__init__(model) | Capture Point constraint.
Definition: "For a biped in state :math:`x`, a Capture Point (CP) :math:`P`, is a point on the ground such that
if the biped covers :math:`P` (makes its base of support include :math:`P`), either with its stance foot or by
stepping to :math:`P` in a single step, and then maintains its Center of Pressure (CoP) to lie on :math:`P`, then
there exists a safe feasible trajectory leading to a capture state (i.e. a state in which the kinetic energy of
the biped is zero and can remain zero with suitable joint torque (note that the CoM must lie above the CoP in a
capture state))." [1] "Intuitively, the CP is the point on the floor onto which the robot has to step to come
to a complete rest" [2].
References:
- [1] "Capture Point: A Step toward Humanoid Push Recovery", Pratt et al., 2006
- [2] "Bipedal walking control based on Capture Point dynamics", Englsberger et al., 2011 | 6259906845492302aabfdc89 |
class IntegrityMonitoringPolicyExtension(object): <NEW_LINE> <INDENT> swagger_types = { 'state': 'str', 'rule_ids': 'list[int]' } <NEW_LINE> attribute_map = { 'state': 'state', 'rule_ids': 'ruleIDs' } <NEW_LINE> def __init__(self, state=None, rule_ids=None): <NEW_LINE> <INDENT> self._state = None <NEW_LINE> self._rule_ids = None <NEW_LINE> self.discriminator = None <NEW_LINE> if state is not None: <NEW_LINE> <INDENT> self.state = state <NEW_LINE> <DEDENT> if rule_ids is not None: <NEW_LINE> <INDENT> self.rule_ids = rule_ids <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @state.setter <NEW_LINE> def state(self, state): <NEW_LINE> <INDENT> allowed_values = ["real-time", "on", "off"] <NEW_LINE> if state not in allowed_values: <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `state` ({0}), must be one of {1}" .format(state, allowed_values) ) <NEW_LINE> <DEDENT> self._state = state <NEW_LINE> <DEDENT> @property <NEW_LINE> def rule_ids(self): <NEW_LINE> <INDENT> return self._rule_ids <NEW_LINE> <DEDENT> @rule_ids.setter <NEW_LINE> def rule_ids(self, rule_ids): <NEW_LINE> <INDENT> self._rule_ids = rule_ids <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(IntegrityMonitoringPolicyExtension, dict): <NEW_LINE> 
<INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, IntegrityMonitoringPolicyExtension): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 625990681f5feb6acb1643a0 |
class ManageTest(integration.ShellCase): <NEW_LINE> <INDENT> def test_active(self): <NEW_LINE> <INDENT> ret = self.run_run_plus('jobs.active') <NEW_LINE> self.assertFalse(ret['fun']) <NEW_LINE> self.assertFalse(ret['out'][1]) <NEW_LINE> <DEDENT> def test_lookup_jid(self): <NEW_LINE> <INDENT> ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394') <NEW_LINE> self.assertTrue(str(ret['fun']).startswith('Job 23974239742394')) <NEW_LINE> self.assertFalse(ret['out'][1]) <NEW_LINE> <DEDENT> def test_list_jobs(self): <NEW_LINE> <INDENT> ret = self.run_run_plus('jobs.list_jobs') <NEW_LINE> self.assertIsInstance(ret['fun'], dict) | Test the manage runner | 62599068d6c5a102081e38da |
class PyWarningsLoggingFilter(object): <NEW_LINE> <INDENT> label = "py.warnings:" <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def filter(self, record): <NEW_LINE> <INDENT> record.msg = '%s %s' % (self.label, record.msg) <NEW_LINE> return True | Add a prefix to the messages from py.warnings.
To help distinguish log messages from python and pygtk 'warnings',
while avoiding changing the log format. | 62599068a8370b77170f1b77 |
class StreamWrapper(object): <NEW_LINE> <INDENT> def __init__(self,stream): <NEW_LINE> <INDENT> if not hasattr(stream,"readline") and hasattr(stream,"recv"): <NEW_LINE> <INDENT> stream = stream.makefile('rb', 0) <NEW_LINE> <DEDENT> self.stream = stream <NEW_LINE> <DEDENT> def readline(self,size=None): <NEW_LINE> <INDENT> return self.stream.readline(size) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> ln = self.readline() <NEW_LINE> while ln != "": <NEW_LINE> <INDENT> yield ln <NEW_LINE> ln = self.readline() <NEW_LINE> <DEDENT> <DEDENT> def write(self,data): <NEW_LINE> <INDENT> return self.stream.write(data) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.stream.close() | Base class for wrapping of streams. | 625990684f88993c371f10f8 |
class Introduction(Page): <NEW_LINE> <INDENT> def vars_for_template(self): <NEW_LINE> <INDENT> self.group.set_payoffs() <NEW_LINE> return {'x': self.player.contribution} <NEW_LINE> <DEDENT> def is_displayed(self): <NEW_LINE> <INDENT> return self.round_number==1 | Description of the game: How to play and returns expected | 625990683eb6a72ae038be13 |
class LOOT_420: <NEW_LINE> <INDENT> pass | Skull of the Man'ari | 625990683317a56b869bf11c |
class ColaProcessor(DataProcessor): <NEW_LINE> <INDENT> def get_train_examples(self, data_dir): <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") <NEW_LINE> <DEDENT> def get_dev_examples(self, data_dir): <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") <NEW_LINE> <DEDENT> def get_test_examples(self, data_dir): <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") <NEW_LINE> <DEDENT> def get_labels(self): <NEW_LINE> <INDENT> return ["0", "1"] <NEW_LINE> <DEDENT> def _create_examples(self, lines, set_type): <NEW_LINE> <INDENT> examples = [] <NEW_LINE> for (i, line) in enumerate(lines): <NEW_LINE> <INDENT> if set_type == "test" and i == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> guid = "%s-%s" % (set_type, i) <NEW_LINE> if set_type == "test": <NEW_LINE> <INDENT> text_a = tokenization.convert_to_unicode(line[1]) <NEW_LINE> label = tokenization.convert_to_unicode(line[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text_a = tokenization.convert_to_unicode(line[1]) <NEW_LINE> label = tokenization.convert_to_unicode(line[0]) <NEW_LINE> <DEDENT> examples.append( InputExample(guid=guid, text_a=text_a, text_b=None, label=label)) <NEW_LINE> <DEDENT> return examples | Processor for the CoLA data set (GLUE version). | 62599068460517430c432c2e |
class EventPanelView(grok.View): <NEW_LINE> <INDENT> grok.context(ISwimmingFolder) <NEW_LINE> grok.require('zope2.View') <NEW_LINE> grok.name('event_panel_view') <NEW_LINE> def update(self): <NEW_LINE> <INDENT> self.haveContents = len(self.folder_contents()) > 0 <NEW_LINE> <DEDENT> @memoize <NEW_LINE> def folder_contents(self): <NEW_LINE> <INDENT> return get_folder_contents(self) | A new view for a swimming folder.
The associated template is found in swimmingfolder_templates/event_panel_view.pt. | 62599068435de62698e9d5bd |
class AppSpiderResponse(object): <NEW_LINE> <INDENT> def __init__(self, message, success, data=None, response_code=-1): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> self.data = data <NEW_LINE> self.success = success <NEW_LINE> self.response_code = response_code <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.data: <NEW_LINE> <INDENT> return str(self.data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> <DEDENT> def binary(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def json(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def id(self): <NEW_LINE> <INDENT> if self.response_code == 400: <NEW_LINE> <INDENT> raise ValueError('Object not created:' + json.dumps(self.data, sort_keys=True, indent=4, separators=(',', ': '))) <NEW_LINE> <DEDENT> return int(self.data) <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> return self.data["TotalCount"] <NEW_LINE> <DEDENT> def is_success(self): <NEW_LINE> <INDENT> data = None <NEW_LINE> try: <NEW_LINE> <INDENT> data = self.data["IsSuccess"] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> data = self.data <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def error(self): <NEW_LINE> <INDENT> return self.data["ErrorMessage"] <NEW_LINE> <DEDENT> def data_json(self, pretty=False): <NEW_LINE> <INDENT> if pretty: <NEW_LINE> <INDENT> return json.dumps(self.data, sort_keys=True, indent=4, separators=(',', ': ')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return json.dumps(self.data) | Container for all AppSpider Enterprise API responses, even errors. | 6259906856b00c62f0fb4081 |
class InputError(Exception): <NEW_LINE> <INDENT> pass | Raised when bad user input is processed. | 62599068091ae356687063e4
class TestAnsibleModuleWarnDeprecate(unittest.TestCase): <NEW_LINE> <INDENT> def test_warn(self): <NEW_LINE> <INDENT> args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) <NEW_LINE> with swap_stdin_and_argv(stdin_data=args): <NEW_LINE> <INDENT> with swap_stdout(): <NEW_LINE> <INDENT> ansible.module_utils.basic._ANSIBLE_ARGS = None <NEW_LINE> am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) <NEW_LINE> am._name = 'unittest' <NEW_LINE> am.warn('warning1') <NEW_LINE> with self.assertRaises(SystemExit): <NEW_LINE> <INDENT> am.exit_json(warnings=['warning2']) <NEW_LINE> <DEDENT> self.assertEquals(json.loads(sys.stdout.getvalue())['warnings'], ['warning1', 'warning2']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_deprecate(self): <NEW_LINE> <INDENT> args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) <NEW_LINE> with swap_stdin_and_argv(stdin_data=args): <NEW_LINE> <INDENT> with swap_stdout(): <NEW_LINE> <INDENT> ansible.module_utils.basic._ANSIBLE_ARGS = None <NEW_LINE> am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) <NEW_LINE> am._name = 'unittest' <NEW_LINE> am.deprecate('deprecation1') <NEW_LINE> am.deprecate('deprecation2', '2.3') <NEW_LINE> with self.assertRaises(SystemExit): <NEW_LINE> <INDENT> am.exit_json(deprecations=['deprecation3', ('deprecation4', '2.4')]) <NEW_LINE> <DEDENT> output = json.loads(sys.stdout.getvalue()) <NEW_LINE> self.assertTrue('warnings' not in output or output['warnings'] == []) <NEW_LINE> self.assertEquals(output['deprecations'], [ {u'msg': u'deprecation1', u'version': None}, {u'msg': u'deprecation2', u'version': '2.3'}, {u'msg': u'deprecation3', u'version': None}, {u'msg': u'deprecation4', u'version': '2.4'}, ]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_deprecate_without_list(self): <NEW_LINE> <INDENT> args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) <NEW_LINE> with swap_stdin_and_argv(stdin_data=args): <NEW_LINE> <INDENT> with swap_stdout(): <NEW_LINE> <INDENT> 
ansible.module_utils.basic._ANSIBLE_ARGS = None <NEW_LINE> am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) <NEW_LINE> am._name = 'unittest' <NEW_LINE> with self.assertRaises(SystemExit): <NEW_LINE> <INDENT> am.exit_json(deprecations='Simple deprecation warning') <NEW_LINE> <DEDENT> output = json.loads(sys.stdout.getvalue()) <NEW_LINE> self.assertTrue('warnings' not in output or output['warnings'] == []) <NEW_LINE> self.assertEquals(output['deprecations'], [ {u'msg': u'Simple deprecation warning', u'version': None}, ]) | Test the AnsibleModule Warn Method | 6259906816aa5153ce401c8c |
class Subjects(db.Model): <NEW_LINE> <INDENT> __tablename__ = "subjects" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> title = db.Column(db.String(2), unique=True) <NEW_LINE> description = db.Column(db.Text) <NEW_LINE> date_created = db.Column(db.DateTime, default=datetime.utcnow) <NEW_LINE> date_modified = db.Column(db.DateTime, onupdate=datetime.utcnow) <NEW_LINE> created_by = db.Column(db.Integer, db.ForeignKey("users.id")) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<Subjects: %r>" % self.title | Creates subjects | 625990682c8b7c6e89bd4f99 |
class RandomAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, reversi, turn): <NEW_LINE> <INDENT> self.reversi = reversi <NEW_LINE> self.color = turn <NEW_LINE> <DEDENT> def get_action(self, state, legal_moves): <NEW_LINE> <INDENT> if not legal_moves: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return random.choice(legal_moves) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def observe_win(self, winner): <NEW_LINE> <INDENT> pass | An agent that simply chooses
totally random legal moves. | 6259906871ff763f4b5e8f59 |
class MultiHeadAttention(nn.Module): <NEW_LINE> <INDENT> def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.n_head = n_head <NEW_LINE> self.d_k = d_k <NEW_LINE> self.d_v = d_v <NEW_LINE> self.w_qs = nn.Linear(d_model, n_head * d_k, bias=True) <NEW_LINE> self.w_ks = nn.Linear(d_model, n_head * d_k, bias=True) <NEW_LINE> self.w_vs = nn.Linear(d_model, n_head * d_v, bias=True) <NEW_LINE> self.fc = nn.Linear(n_head * d_v, d_model, bias=True) <NEW_LINE> self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5) <NEW_LINE> self.dropout = nn.Dropout(dropout) <NEW_LINE> self.layer_norm = nn.LayerNorm(d_model, eps=1e-6) <NEW_LINE> <DEDENT> def forward(self, q, k, v, mask=None): <NEW_LINE> <INDENT> d_k, d_v, n_head = self.d_k, self.d_v, self.n_head <NEW_LINE> sz_b, len_q, len_k, len_v = q.size(0), q.size(1), k.size(1), v.size(1) <NEW_LINE> residual = q <NEW_LINE> q = self.w_qs(q) <NEW_LINE> k = self.w_ks(k) <NEW_LINE> v = self.w_vs(v) <NEW_LINE> q = q.view(sz_b, len_q, n_head, d_k) <NEW_LINE> k = k.view(sz_b, len_k, n_head, d_k) <NEW_LINE> v = v.view(sz_b, len_v, n_head, d_v) <NEW_LINE> q, k, v = q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2) <NEW_LINE> q, attn = self.attention(q, k, v, mask=mask) <NEW_LINE> q = q.transpose(1, 2).contiguous().view(sz_b, len_q, -1) <NEW_LINE> q = self.dropout(self.fc(q)) <NEW_LINE> out = residual + q <NEW_LINE> out = self.layer_norm(out) <NEW_LINE> return out, attn | Multi-Head Attention module | 6259906899fddb7c1ca639a9 |
class User(BaseModel): <NEW_LINE> <INDENT> username: str = Field(..., min_length=1, max_length=256) <NEW_LINE> password: str = Field( ..., min_length=settings.MINIMUM_PASSWORD_LENGTH, max_length=settings.MAXIMUM_PASSWORD_LENGTH, ) <NEW_LINE> email: EmailStr <NEW_LINE> full_name: str = Field("", max_length=100) <NEW_LINE> phone_number: str = Field("", min_length=9, max_length=15) <NEW_LINE> become: UserType <NEW_LINE> @validator("username") <NEW_LINE> def extra_validation_on_username(cls: "User", value: str) -> str: <NEW_LINE> <INDENT> validators.validate_reserved_name(value=value, exception_class=ValueError) <NEW_LINE> validators.validate_confusables(value=value, exception_class=ValueError) <NEW_LINE> return value <NEW_LINE> <DEDENT> @validator("password") <NEW_LINE> def extra_validation_on_password(cls: "User", value: str) -> str: <NEW_LINE> <INDENT> result = pwned.pwned_password(password=value) <NEW_LINE> if result is None: <NEW_LINE> <INDENT> raise ValueError("Connection error, try again") <NEW_LINE> <DEDENT> if result > 0: <NEW_LINE> <INDENT> raise ValueError( f"Oh no — pwned! This password has been seen {result} times before" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> @validator("email") <NEW_LINE> def extra_validation_on_email(cls: "User", value: str) -> str: <NEW_LINE> <INDENT> local_part, domain = value.split("@") <NEW_LINE> validators.validate_reserved_name(value=local_part, exception_class=ValueError) <NEW_LINE> validators.validate_confusables_email( domain=domain, local_part=local_part, exception_class=ValueError ) <NEW_LINE> return value <NEW_LINE> <DEDENT> @validator("phone_number") <NEW_LINE> def extra_validation_on_phone_number(cls: "User", value: str) -> str: <NEW_LINE> <INDENT> result = re.match(r"^\+?1?\d{9,15}$", value) <NEW_LINE> if not result: <NEW_LINE> <INDENT> raise ValueError( "Phone number must be entered in the format: '+251999999999." " Up to 15 digits allowed." 
) <NEW_LINE> <DEDENT> return value | Schema for user sign up data. | 625990685fcc89381b266d30 |
class Email(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'emails' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> created_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow) <NEW_LINE> updated_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE')) <NEW_LINE> type = db.Column(db.String(16), nullable=False) <NEW_LINE> address = db.Column(db.String(64), nullable=False) <NEW_LINE> is_primary = db.Column(db.Boolean, nullable=False, default=False) | Represents a user's email address | 62599068442bda511e95d932 |
class OffsetHierarchyFilter(OffsetFilter): <NEW_LINE> <INDENT> derivationStr = 'getElementsByOffsetInHierarchy' <NEW_LINE> def __call__(self, e, iterator): <NEW_LINE> <INDENT> s = iterator.srcStream <NEW_LINE> if s is e: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not hasattr(iterator, 'iteratorStartOffsetInHierarchy'): <NEW_LINE> <INDENT> raise FilterException('Can only run OffsetHierarchyFilter on a RecursiveIterator') <NEW_LINE> <DEDENT> offset = s.elementOffset(e) + iterator.iteratorStartOffsetInHierarchy <NEW_LINE> return self.isElementOffsetInRange(e, offset, stopAfterEnd=False) | see iterator.getElementsByOffsetInHierarchy()
Finds elements that match a given offset range in the hierarchy.
Do not call .stream() afterwards or unstable results can occur. | 625990687047854f46340b68 |
class J(DStatistic): <NEW_LINE> <INDENT> def __init__(self, pp, n=100, intervals=10, dmin=0.0, dmax=None, d=None): <NEW_LINE> <INDENT> res = _j(pp, n, intervals, dmin, dmax, d) <NEW_LINE> self.d = res[:, 0] <NEW_LINE> self.j = self._stat = res[:, 1] <NEW_LINE> self.ev = self.j / self.j <NEW_LINE> super(J, self).__init__(name="J") | Estimates the J function for a point pattern :cite:`VanLieshout1996`
Parameters
----------
pp : :class:`.PointPattern`
Point Pattern instance.
n : int
Number of empty space points (random points).
intervals : int
The length of distance domain sequence.
dmin : float
The minimum of the distance domain.
dmax : float
The maximum of the distance domain.
d : sequence
The distance domain sequence.
If d is specified, intervals, dmin and dmax are ignored.
Attributes
----------
d : array
The distance domain sequence.
j : array
F function over d.
Notes
-----
The :math:`J` function is a ratio of the hazard functions defined for
:math:`G` and :math:`F`:
.. math::
J(d) = \frac{1-G(d) }{1-F(d)}
where :math:`G(d)` is the nearest neighbor distance distribution function
(see :class:`G`)
and :math:`F(d)` is the empty space function (see :class:`F`).
For a CSR process the J function equals 1. Empirical values larger than 1
are indicative of uniformity, while values below 1 suggest clustering. | 62599068be8e80087fbc083e |
class TextParser(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.processors = [] <NEW_LINE> for l in inspect.getmembers(processors, inspect.isclass): <NEW_LINE> <INDENT> c = getattr(processors, l[0]) <NEW_LINE> self.register_processor(c()) <NEW_LINE> <DEDENT> <DEDENT> def register_processor(self, processor): <NEW_LINE> <INDENT> self.processors.append(processor) <NEW_LINE> <DEDENT> def parse_text(self, msg): <NEW_LINE> <INDENT> for processor in self.processors: <NEW_LINE> <INDENT> if processor.match(msg): <NEW_LINE> <INDENT> break | docstring for TextParser | 6259906856ac1b37e63038bc |
class MUX(Ghost): <NEW_LINE> <INDENT> def __init__(self,selfobj): <NEW_LINE> <INDENT> Ghost.__init__(self, selfobj) <NEW_LINE> selfobj.IAm = 'PartOMagic.MUX' <NEW_LINE> selfobj.addProperty('App::PropertyBool','FlattenCompound',"MUX","If true, compound nesting does not follow nesting of Parts. If False, compound nesting follows nexting of parts.") <NEW_LINE> selfobj.addProperty('App::PropertyLinkListGlobal', 'ExclusionList', "MUX", 'List of objects to exclude from compound') <NEW_LINE> selfobj.addProperty('App::PropertyEnumeration', 'Traversal', "MUX", 'Sets if to look for shapes in nested containers') <NEW_LINE> selfobj.Traversal = ['Direct children', 'Recursive'] <NEW_LINE> selfobj.Traversal = 'Recursive' <NEW_LINE> <DEDENT> def execute(self,selfobj): <NEW_LINE> <INDENT> transform = self.getTransform(selfobj) <NEW_LINE> selfobj.Shape = compoundFromAssembly(selfobj.Base, selfobj.FlattenCompound, selfobj.ExclusionList, recursive= selfobj.Traversal == 'Recursive') <NEW_LINE> toleave,toenter = self.path <NEW_LINE> if True: <NEW_LINE> <INDENT> selfobj.Placement = transform.multiply(selfobj.Base.Placement) <NEW_LINE> selfobj.setEditorMode('Placement', 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> selfobj.setEditorMode('Placement', 0) <NEW_LINE> <DEDENT> path = '' <NEW_LINE> for cnt in toenter: <NEW_LINE> <INDENT> path += '../' <NEW_LINE> <DEDENT> for cnt in toleave: <NEW_LINE> <INDENT> path += cnt.Name + '/' <NEW_LINE> <DEDENT> labelf = u'{name} {label} from {path}' if toleave or toenter else u'{name} {label}' <NEW_LINE> selfobj.Label = labelf.format(label= selfobj.Base.Label, name= selfobj.Name, path= path[:-1]) | MUX object, converts assembly into a compound | 625990685fdd1c0f98e5f739 |
class Blight(Spell): <NEW_LINE> <INDENT> name = "Blight" <NEW_LINE> level = 4 <NEW_LINE> casting_time = "1 action" <NEW_LINE> casting_range = "30 feet" <NEW_LINE> components = ('V', 'S') <NEW_LINE> materials = """""" <NEW_LINE> duration = "Instantaneous" <NEW_LINE> ritual = False <NEW_LINE> magic_school = "Necromancy" <NEW_LINE> classes = ('Druid', 'Sorcerer', 'Warlock', 'Wizard') | Necromantic energy washes over a creature of your choice that you can see within
range, draining moisture and vitality from it. The target must make a
Constitution saving throw. The target takes 8d8 necrotic damage on a failed
save, or half as much damage on a successful one. This spell has no effect on
undead or constructs.
If you target a plant creature or a magical plant, it
makes the saving throw with disadvantage, and the spell deals maximum damage to
it.
If you target a nonmagical plant that isn’t a creature, such as a tree or
shrub, it doesn’t make a saving throw; it simply withers and dies.
At Higher
Levels: When you cast this spell using a spell slot of 5th level or higher, the
damage increases by 1d8 for each slot level above 4th. | 625990685166f23b2e244b86 |
class TagAdmin(sqla.ModelView): <NEW_LINE> <INDENT> def is_accessible(self): <NEW_LINE> <INDENT> return is_admin(current_user) | Defines the Tag administration page | 62599068f548e778e596cd3f |
class PyssStateObject(PyssOwnerObject): <NEW_LINE> <INDENT> def __init__(self, entityType, label=None, owner=None): <NEW_LINE> <INDENT> super(PyssStateObject, self).__init__(entityType, label=label, owner=owner) <NEW_LINE> self[ON_STATE_CHANGE] = {} <NEW_LINE> <DEDENT> def existsHandlerOnStateChange(self, handlerName): <NEW_LINE> <INDENT> return handlerName in self[ON_STATE_CHANGE].keys() <NEW_LINE> <DEDENT> def addHandlerOnStateChange(self, handlerName=None, handler=None): <NEW_LINE> <INDENT> if handlerName is None: <NEW_LINE> <INDENT> raise pyssobject.ErrorIsNone("handlerName is None") <NEW_LINE> <DEDENT> if handlerName not in self[ON_STATE_CHANGE].keys(): <NEW_LINE> <INDENT> self[ON_STATE_CHANGE][handlerName] = handler <NEW_LINE> <DEDENT> <DEDENT> def removeHandlerOnStateChange(self, handlerName): <NEW_LINE> <INDENT> if handlerName in self[ON_STATE_CHANGE].keys(): <NEW_LINE> <INDENT> self[ON_STATE_CHANGE].pop(handlerName, None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warn("Попытка удаления несуществующего обработчика изменения состояния") <NEW_LINE> <DEDENT> <DEDENT> def fireHandlerOnStateChange(self, oldState): <NEW_LINE> <INDENT> for h in self[ON_STATE_CHANGE].itervalues(): <NEW_LINE> <INDENT> h(self, oldState) | Базовый класс для объектов модели с обработкой состояний
Args:
entityType - задает строку, идентифицирующую объект модели.
objectNumber - номер объекта
label - задаёт метку, по которой можно найти объект в контейнерах модели:
Атрибуты базового класса объекта модели (в дополнение к атрибутам pyssobject.PyssObject):
bl = <наследник от PyssOwnerObject>(...)
bl[OWNER] - объект-владелец | 625990680a50d4780f70699a |
class BasisDependentMul(BasisDependent, Mul): <NEW_LINE> <INDENT> def __new__(cls, *args, **options): <NEW_LINE> <INDENT> from sympy.vector import Cross, Dot, Curl, Gradient <NEW_LINE> count = 0 <NEW_LINE> measure_number = S.One <NEW_LINE> zeroflag = False <NEW_LINE> extra_args = [] <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> if isinstance(arg, cls._zero_func): <NEW_LINE> <INDENT> count += 1 <NEW_LINE> zeroflag = True <NEW_LINE> <DEDENT> elif arg == S.Zero: <NEW_LINE> <INDENT> zeroflag = True <NEW_LINE> <DEDENT> elif isinstance(arg, (cls._base_func, cls._mul_func)): <NEW_LINE> <INDENT> count += 1 <NEW_LINE> expr = arg._base_instance <NEW_LINE> measure_number *= arg._measure_number <NEW_LINE> <DEDENT> elif isinstance(arg, cls._add_func): <NEW_LINE> <INDENT> count += 1 <NEW_LINE> expr = arg <NEW_LINE> <DEDENT> elif isinstance(arg, (Cross, Dot, Curl, Gradient)): <NEW_LINE> <INDENT> extra_args.append(arg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> measure_number *= arg <NEW_LINE> <DEDENT> <DEDENT> if count > 1: <NEW_LINE> <INDENT> raise ValueError("Invalid multiplication") <NEW_LINE> <DEDENT> elif count == 0: <NEW_LINE> <INDENT> return Mul(*args, **options) <NEW_LINE> <DEDENT> if zeroflag: <NEW_LINE> <INDENT> return cls.zero <NEW_LINE> <DEDENT> if isinstance(expr, cls._add_func): <NEW_LINE> <INDENT> newargs = [cls._mul_func(measure_number, x) for x in expr.args] <NEW_LINE> return cls._add_func(*newargs) <NEW_LINE> <DEDENT> obj = super().__new__(cls, measure_number, expr._base_instance, *extra_args, **options) <NEW_LINE> if isinstance(obj, Add): <NEW_LINE> <INDENT> return cls._add_func(*obj.args) <NEW_LINE> <DEDENT> obj._base_instance = expr._base_instance <NEW_LINE> obj._measure_number = measure_number <NEW_LINE> assumptions = {'commutative': True} <NEW_LINE> obj._assumptions = StdFactKB(assumptions) <NEW_LINE> obj._components = {expr._base_instance: measure_number} <NEW_LINE> obj._sys = expr._base_instance._sys <NEW_LINE> return obj <NEW_LINE> <DEDENT> def 
_sympystr(self, printer): <NEW_LINE> <INDENT> measure_str = printer._print(self._measure_number) <NEW_LINE> if ('(' in measure_str or '-' in measure_str or '+' in measure_str): <NEW_LINE> <INDENT> measure_str = '(' + measure_str + ')' <NEW_LINE> <DEDENT> return measure_str + '*' + printer._print(self._base_instance) | Denotes product of base- basis dependent quantity with a scalar. | 6259906832920d7e50bc77fa |
class GetTopTagsInputSet(InputSet): <NEW_LINE> <INDENT> def set_APIKey(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('APIKey', value) <NEW_LINE> <DEDENT> def set_Album(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('Album', value) <NEW_LINE> <DEDENT> def set_Artist(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('Artist', value) <NEW_LINE> <DEDENT> def set_AutoCorrect(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('AutoCorrect', value) <NEW_LINE> <DEDENT> def set_MbID(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('MbID', value) | An InputSet with methods appropriate for specifying the inputs to the GetTopTags
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 62599068009cb60464d02cee |
class LabelEditEventArgs(EventArgs): <NEW_LINE> <INDENT> def __getitem__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(self,item,label=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> CancelEdit=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> Item=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> Label=property(lambda self: object(),lambda self,v: None,lambda self: None) | Provides data for the System.Windows.Forms.ListView.BeforeLabelEdit and System.Windows.Forms.ListView.AfterLabelEdit events.
LabelEditEventArgs(item: int)
LabelEditEventArgs(item: int,label: str) | 6259906855399d3f05627cd5 |
class Memory(core.BaseReader): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.header = MemoryHeader() <NEW_LINE> self.data = MemoryData() <NEW_LINE> self.inputter = MemoryInputter() <NEW_LINE> self.outputter = core.TableOutputter() <NEW_LINE> self.meta = {} <NEW_LINE> self.keywords = [] <NEW_LINE> <DEDENT> def read(self, table): <NEW_LINE> <INDENT> self.data.header = self.header <NEW_LINE> self.header.data = self.data <NEW_LINE> self.lines = self.inputter.get_lines(table, self.header.names) <NEW_LINE> self.data.get_data_lines(self.lines) <NEW_LINE> self.header.get_cols(self.lines) <NEW_LINE> cols = self.header.cols <NEW_LINE> n_data_cols = len(self.header.names) <NEW_LINE> self.data.splitter.cols = cols <NEW_LINE> for i, str_vals in enumerate(self.data.get_str_vals()): <NEW_LINE> <INDENT> if len(list(str_vals)) != n_data_cols: <NEW_LINE> <INDENT> errmsg = ('Number of header columns (%d) inconsistent with ' 'data columns (%d) at data line %d\n' 'Header values: %s\n' 'Data values: %s' % (len(cols), len(str_vals), i, [x.name for x in cols], str_vals)) <NEW_LINE> raise core.InconsistentTableError(errmsg) <NEW_LINE> <DEDENT> for col in cols: <NEW_LINE> <INDENT> col.str_vals.append(str_vals[col.index]) <NEW_LINE> <DEDENT> <DEDENT> self.data.masks(cols) <NEW_LINE> self.cols = cols <NEW_LINE> if hasattr(table, 'keywords'): <NEW_LINE> <INDENT> self.keywords = table.keywords <NEW_LINE> <DEDENT> self.outputter.default_converters = [((lambda vals: vals), core.IntType), ((lambda vals: vals), core.FloatType), ((lambda vals: vals), core.StrType)] <NEW_LINE> self.table = self.outputter(cols) <NEW_LINE> self.cols = self.header.cols <NEW_LINE> return self.table <NEW_LINE> <DEDENT> def write(self, table=None): <NEW_LINE> <INDENT> raise NotImplementedError | Read a table from a data object in memory. Several input data formats are supported:
**Output of asciitable.read()**::
table = asciitable.get_reader(Reader=asciitable.Daophot)
data = table.read('t/daophot.dat')
mem_data_from_table = asciitable.read(table, Reader=asciitable.Memory)
mem_data_from_data = asciitable.read(data, Reader=asciitable.Memory)
**Numpy structured array**::
data = numpy.zeros((2,), dtype=[('col1','i4'), ('col2','f4'), ('col3', 'a10')])
data[:] = [(1, 2., 'Hello'), (2, 3., "World")]
mem_data = asciitable.read(data, Reader=asciitable.Memory)
**Numpy masked structured array**::
data = numpy.ma.zeros((2,), dtype=[('col1','i4'), ('col2','f4'), ('col3', 'a10')])
data[:] = [(1, 2., 'Hello'), (2, 3., "World")]
data['col2'] = ma.masked
mem_data = asciitable.read(data, Reader=asciitable.Memory)
In the current version all masked values will be converted to nan.
**Sequence of sequences**::
data = [[1, 2, 3 ],
[4, 5.2, 6.1 ],
[8, 9, 'hello']]
mem_data = asciitable.read(data, Reader=asciitable.Memory, names=('c1','c2','c3'))
**Dict of sequences**::
data = {'c1': [1, 2, 3],
'c2': [4, 5.2, 6.1],
'c3': [8, 9, 'hello']}
mem_data = asciitable.read(data, Reader=asciitable.Memory, names=('c1','c2','c3')) | 62599068460517430c432c2f |
class JsonKeyNotExistedError(ModelKeyNotExistError): <NEW_LINE> <INDENT> def __init__(self, fk: str, model_name: str): <NEW_LINE> <INDENT> super().__init__(f"Json key `{fk}` not existed in the model `{model_name}`.") | Raised if the json key does not exist in the model. | 625990687d43ff2487427feb |
class Mish(nn.Module): <NEW_LINE> <INDENT> def forward(self, input: torch.Tensor) -> torch.Tensor: <NEW_LINE> <INDENT> return input * torch.tanh(torch.nn.functional.softplus(input)) | Applies the element-wise function:
.. math::
\text{Mish}(x) = x * tanh(\text{softplus}(x)).
Citation: Mish: A Self Regularized Non-Monotonic Activation Function, Diganta Misra, 2019, https://arxiv.org/abs/1908.08681.
Shape:
- Input: :math:`(N, *)` where `*` means, any number of additional
dimensions
- Output: :math:`(N, *)`, same shape as the input
Examples::
>>> m = Act['mish']()
>>> input = torch.randn(2)
>>> output = m(input) | 6259906821bff66bcd72441b |
class ManufactureViewSet(mixins.RetrieveModelMixin, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> model = Manufacture <NEW_LINE> permission_classes = [IsAuthenticated, IsAdminUser,] <NEW_LINE> serializer_class = ManufactureDeviceSerializer <NEW_LINE> queryset = Manufacture.objects.all() <NEW_LINE> filter_class = ManufactureSerializerFilter <NEW_LINE> filter_backends = (filters.OrderingFilter, filters.DjangoFilterBackend) <NEW_LINE> filter_fields = ('device_sn','device_type','pcba_srl') | List all device manufactured, or manufacture a new device. | 62599068627d3e7fe0e0863e |
class CallInstruction(BaseInstruction): <NEW_LINE> <INDENT> ret_names = List().tag(pref=True) <NEW_LINE> action_kwargs = Typed(OrderedDict, ()).tag(pref=(ordered_dict_to_pref, ordered_dict_from_pref)) <NEW_LINE> def prepare(self): <NEW_LINE> <INDENT> source = ( "def _call_(driver, kwargs, **ch_ids):" " return {path}(**kwargs)") <NEW_LINE> local = {} <NEW_LINE> exec(source.format(', '.join(self.ch_ids), self.path), local) <NEW_LINE> self._caller = local['_call_'] <NEW_LINE> <DEDENT> def execute(self, task, driver): <NEW_LINE> <INDENT> ch_ids = {k: task.format_and_eval_string(v) for k, v in self.ch_ids.items()} <NEW_LINE> action_kwargs = {k: task.format_and_eval_string(v) for k, v in self.action_kwargs.items()} <NEW_LINE> res = self._caller(driver, action_kwargs, **ch_ids) <NEW_LINE> if self.ret_names: <NEW_LINE> <INDENT> for i, name in enumerate(self.get_names): <NEW_LINE> <INDENT> task.write_in_database(name, res[i]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> task.write_in_database(self.id, res) <NEW_LINE> <DEDENT> <DEDENT> _setter = Callable() <NEW_LINE> def _post_setattr_ret_names(self, old, new): <NEW_LINE> <INDENT> if new: <NEW_LINE> <INDENT> self.database_entries = {self.id + '_' + rn: 1.0 for rn in new} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {self.id: 1.0} <NEW_LINE> <DEDENT> <DEDENT> def _default_database_entries(self): <NEW_LINE> <INDENT> return {self.id: 1.0} | Call an instrument action and store the result in the database.
| 6259906823849d37ff85286a |
class KeyStorage(KeyStorageType_): <NEW_LINE> <INDENT> c_tag = 'KeyStorage' <NEW_LINE> c_namespace = NAMESPACE <NEW_LINE> c_children = KeyStorageType_.c_children.copy() <NEW_LINE> c_attributes = KeyStorageType_.c_attributes.copy() <NEW_LINE> c_child_order = KeyStorageType_.c_child_order[:] <NEW_LINE> c_cardinality = KeyStorageType_.c_cardinality.copy() | The urn:oasis:names:tc:SAML:2.0:ac:classes:TLSClient:KeyStorage element | 6259906867a9b606de54767c |
class FeatureTopDownExpandRule(TopDownExpandRule): <NEW_LINE> <INDENT> def __init__(self, trace=0): <NEW_LINE> <INDENT> TopDownExpandRule.__init__(self) <NEW_LINE> self.unify_memo = {} <NEW_LINE> self.trace = trace <NEW_LINE> <DEDENT> def apply_iter(self, chart, grammar, edge): <NEW_LINE> <INDENT> if edge.is_complete(): return <NEW_LINE> for prod in grammar.productions(): <NEW_LINE> <INDENT> bindings = edge.vars().copy() <NEW_LINE> try: <NEW_LINE> <INDENT> unified = unify(edge.next(), prod.lhs(), bindings, {}, memo=self.unify_memo, trace=self.trace - 2) <NEW_LINE> if isinstance(unified, Category): unified.freeze() <NEW_LINE> <DEDENT> except UnificationFailure: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> new_edge = FeatureTreeEdge.from_production(prod, edge.end()) <NEW_LINE> if chart.insert(new_edge, ()): <NEW_LINE> <INDENT> yield new_edge | The @C{TopDownExpandRule} specialised for feature-based grammars. | 625990681b99ca4002290110 |
class SharedSessionByEmailManager(models.Manager): <NEW_LINE> <INDENT> pass | Manager for the SharedSessionByEmail model | 62599068a8370b77170f1b7a |
class SampleNodeV5(desc.Node): <NEW_LINE> <INDENT> inputs = [ desc.File(name='in', label='Input', description='', value='', uid=[0]), desc.ListAttribute(name='paramA', label='ParamA', elementDesc=desc.GroupAttribute( groupDesc=SampleGroupV2, name='gA', label='gA', description=''), description='') ] <NEW_LINE> outputs = [ desc.File(name='output', label='Output', description='', value=desc.Node.internalFolder, uid=[]) ] | Changes from V4:
* 'paramA' elementDesc has changed from SampleGroupV1 to SampleGroupV2 | 62599068e1aae11d1e7cf3e7 |
class DEMove(RedBlueMove): <NEW_LINE> <INDENT> def __init__(self, sigma=1.0e-5, gamma0=None, **kwargs): <NEW_LINE> <INDENT> self.sigma = sigma <NEW_LINE> self.gamma0 = gamma0 <NEW_LINE> kwargs["nsplits"] = 3 <NEW_LINE> super(DEMove, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def setup(self, coords): <NEW_LINE> <INDENT> self.g0 = self.gamma0 <NEW_LINE> if self.g0 is None: <NEW_LINE> <INDENT> ndim = coords.shape[1] <NEW_LINE> self.g0 = 2.38 / np.sqrt(2 * ndim) <NEW_LINE> <DEDENT> <DEDENT> def get_proposal(self, s, c, random): <NEW_LINE> <INDENT> Ns = len(s) <NEW_LINE> Nc = list(map(len, c)) <NEW_LINE> ndim = s.shape[1] <NEW_LINE> q = np.empty((Ns, ndim), dtype=np.float64) <NEW_LINE> f = self.sigma * random.randn(Ns) <NEW_LINE> for i in range(Ns): <NEW_LINE> <INDENT> w = np.array([c[j][random.randint(Nc[j])] for j in range(2)]) <NEW_LINE> random.shuffle(w) <NEW_LINE> g = np.diff(w, axis=0) * self.g0 + f[i] <NEW_LINE> q[i] = s[i] + g <NEW_LINE> <DEDENT> return q, np.zeros(Ns, dtype=np.float64) | A proposal using differential evolution.
This `Differential evolution proposal
<http://www.stat.columbia.edu/~gelman/stuff_for_blog/cajo.pdf>`_ is
implemented following `Nelson et al. (2013)
<https://arxiv.org/abs/1311.5229>`_.
Args:
sigma (float): The standard deviation of the Gaussian used to stretch
the proposal vector.
gamma0 (Optional[float]): The mean stretch factor for the proposal
vector. By default, it is :math:`2.38 / \sqrt{2\,\mathrm{ndim}}`
as recommended by the two references. | 62599068dd821e528d6da55c |
@inherit_doc <NEW_LINE> class RandomForestRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasSeed, RandomForestParams, TreeRegressorParams, HasCheckpointInterval, JavaMLWritable, JavaMLReadable, HasVarianceCol): <NEW_LINE> <INDENT> @keyword_only <NEW_LINE> def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20, featureSubsetStrategy="auto", varianceCol=None): <NEW_LINE> <INDENT> super(RandomForestRegressor, self).__init__() <NEW_LINE> self._java_obj = self._new_java_obj( "org.apache.spark.ml.regression.RandomForestRegressor", self.uid) <NEW_LINE> self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", subsamplingRate=1.0, numTrees=20, featureSubsetStrategy="auto") <NEW_LINE> kwargs = self.__init__._input_kwargs <NEW_LINE> self.setParams(**kwargs) <NEW_LINE> <DEDENT> @keyword_only <NEW_LINE> @since("1.4.0") <NEW_LINE> def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20, featureSubsetStrategy="auto", varianceCol=None): <NEW_LINE> <INDENT> kwargs = self.setParams._input_kwargs <NEW_LINE> return self._set(**kwargs) <NEW_LINE> <DEDENT> def _create_model(self, java_model): <NEW_LINE> <INDENT> return RandomForestRegressionModel(java_model) | `Random Forest <http://en.wikipedia.org/wiki/Random_forest>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from numpy import allclose
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> rf = RandomForestRegressor(numTrees=2, maxDepth=2, seed=42)
>>> model = rf.fit(df)
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> allclose(model.treeWeights, [1.0, 1.0])
True
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> model.numFeatures
1
>>> model.trees
[DecisionTreeRegressionModel (uid=...) of depth..., DecisionTreeRegressionModel...]
>>> model.getNumTrees
2
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
0.5
>>> rfr_path = temp_path + "/rfr"
>>> rf.save(rfr_path)
>>> rf2 = RandomForestRegressor.load(rfr_path)
>>> rf2.getNumTrees()
2
>>> model_path = temp_path + "/rfr_model"
>>> model.save(model_path)
>>> model2 = RandomForestRegressionModel.load(model_path)
>>> model.featureImportances == model2.featureImportances
True
.. versionadded:: 1.4.0 | 62599068462c4b4f79dbd1bc |
class AlphanumericDataGrid(DataGrid): <NEW_LINE> <INDENT> def __init__(self, request, queryset, sortable_column, extra_regex='^[0-9].*', *args, **kwargs): <NEW_LINE> <INDENT> self.current_letter = request.GET.get('letter', 'all') <NEW_LINE> regex_match = re.compile(extra_regex) <NEW_LINE> if self.current_letter == 'all': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif self.current_letter.isalpha(): <NEW_LINE> <INDENT> queryset = queryset.filter(**{ sortable_column + '__istartswith': self.current_letter }) <NEW_LINE> <DEDENT> elif regex_match.match(self.current_letter): <NEW_LINE> <INDENT> queryset = queryset.filter(**{ sortable_column + '__regex': extra_regex }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> super(AlphanumericDataGrid, self).__init__(request, queryset, *args, **kwargs) <NEW_LINE> self.extra_context['current_letter'] = self.current_letter <NEW_LINE> self.extra_context['letters'] = (['all', '0'] + list(string.ascii_uppercase)) <NEW_LINE> self.special_query_args.append('letter') <NEW_LINE> self.paginator_template = 'datagrid/alphanumeric_paginator.html' | A DataGrid subclass for an alphanumerically-paginated datagrid.
This is useful for datasets that need to be queried alphanumerically,
according to the starting character of their ``sortable`` column. | 625990683d592f4c4edbc694 |
class TripletLoss(Layer): <NEW_LINE> <INDENT> def __init__(self, margin, epsilon=1e-6, **kwargs): <NEW_LINE> <INDENT> self.margin = margin <NEW_LINE> self.epsilon = epsilon <NEW_LINE> super(TripletLoss, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> super(TripletLoss, self).build(input_shape) <NEW_LINE> <DEDENT> def call(self, x): <NEW_LINE> <INDENT> anchors = x[0] <NEW_LINE> positives = x[1] <NEW_LINE> negatives = x[2] <NEW_LINE> pos_dists = K.sqrt(K.relu(K.sum(K.square(anchors-positives), axis=1))+self.epsilon) <NEW_LINE> neg_dists = K.sqrt(K.relu(K.sum(K.square(anchors-negatives), axis=1))+self.epsilon) <NEW_LINE> return K.mean(K.relu(pos_dists - neg_dists + self.margin)) | Computes the triplet distance loss given the triplet embeddings. The input to the layer is a list of tensors in the following order: `[anchor_embeddings, positive_embeddings, negative_embeddings]`. Note that this is the naive version of the triplet loss; no in-batch mining is done for "hard" or "semi-hard" triplets.
The output of the layer can be passed to Model.add_loss() as it is intended to be minimized directly without comparison to labels.
# Arguments
margin: The margin between inter-class distances and intra-class distances.
epsilon: Small number to add before sqrt. Defaults to 1e-6
# Input shapes
list of 2D tensors with shapes: `[(batch_size, latent_dim), (batch_size, latent_dim), (batch_size, latent_dim)]`
# Output shape
1D Tensor with shape `(batch_size,)` | 62599068baa26c4b54d50a5c |
@skipIf(NO_MOCK, NO_MOCK_REASON) <NEW_LINE> class SMTPReturnerTestCase(TestCase, LoaderModuleMockMixin): <NEW_LINE> <INDENT> def setup_loader_modules(self): <NEW_LINE> <INDENT> return {smtp: {}} <NEW_LINE> <DEDENT> def _test_returner(self, mocked_smtplib, *args): <NEW_LINE> <INDENT> ret = {'id': '12345', 'fun': 'mytest.func', 'fun_args': 'myfunc args', 'jid': '54321', 'return': 'The room is on fire as shes fixing her hair'} <NEW_LINE> options = {'username': '', 'tls': '', 'from': '', 'fields': 'id,fun,fun_args,jid,return', 'to': '', 'host': '', 'renderer': 'jinja|yaml', 'template': '', 'password': '', 'gpgowner': '', 'subject': ''} <NEW_LINE> with patch('salt.returners.smtp_return._get_options', MagicMock(return_value=options)): <NEW_LINE> <INDENT> smtp.returner(ret) <NEW_LINE> self.assertTrue(mocked_smtplib.return_value.sendmail.called) <NEW_LINE> <DEDENT> <DEDENT> if HAS_GNUPG: <NEW_LINE> <INDENT> @patch('salt.returners.smtp_return.gnupg') <NEW_LINE> @patch('salt.returners.smtp_return.smtplib.SMTP') <NEW_LINE> def test_returner(self, mocked_smtplib, *args): <NEW_LINE> <INDENT> with patch.dict(smtp.__opts__, {'extension_modules': '', 'renderer': 'jinja|yaml', 'renderer_blacklist': [], 'renderer_whitelist': [], 'file_roots': [], 'pillar_roots': [], 'cachedir': '/'}): <NEW_LINE> <INDENT> self._test_returner(mocked_smtplib, *args) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> @patch('salt.returners.smtp_return.smtplib.SMTP') <NEW_LINE> def test_returner(self, mocked_smtplib, *args): <NEW_LINE> <INDENT> with patch.dict(smtp.__opts__, {'extension_modules': '', 'renderer': 'jinja|yaml', 'renderer_blacklist': [], 'renderer_whitelist': [], 'file_roots': [], 'pillar_roots': [], 'cachedir': '/'}): <NEW_LINE> <INDENT> self._test_returner(mocked_smtplib, *args) | Test SMTP returner | 625990687b180e01f3e49c3f |
class SaneDefList(Extension): <NEW_LINE> <INDENT> class Prep(Preprocessor): <NEW_LINE> <INDENT> def run(self, lines): <NEW_LINE> <INDENT> new_lines = [] <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> match = re.match(r'^(\s*)([^:]+):\s{2,}(.+)', line) <NEW_LINE> if match: <NEW_LINE> <INDENT> new_lines.append(match.group(1) + match.group(2)) <NEW_LINE> new_lines.append(match.group(1) + ': ' + match.group(3)) <NEW_LINE> new_lines.append('') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_lines.append(line) <NEW_LINE> <DEDENT> <DEDENT> return new_lines <NEW_LINE> <DEDENT> <DEDENT> def extendMarkdown(self, md, md_globals): <NEW_LINE> <INDENT> md.preprocessors.add('sane-def-list', SaneDefList.Prep(md), '_end') | ## Better definition lists | 6259906801c39578d7f14310 |
class StorageInventoryPage(BasePage): <NEW_LINE> <INDENT> def __init__(self, driver): <NEW_LINE> <INDENT> BasePage.__init__(self, driver, __file__) <NEW_LINE> self.driver = driver <NEW_LINE> self.carton_locator = None <NEW_LINE> self.next_step_locator = None <NEW_LINE> self.outer_box_code_locator = None <NEW_LINE> self.commoditycode_locator = None <NEW_LINE> self.amount_locator = None <NEW_LINE> self.units_locator = None <NEW_LINE> self.box_locator = None <NEW_LINE> self.piece_locator = None <NEW_LINE> self.mention_locator = None <NEW_LINE> self.bag_locator = None <NEW_LINE> self.determine_locator = None <NEW_LINE> self.storage_code_locator = None <NEW_LINE> try: <NEW_LINE> <INDENT> self.is_loaded() <NEW_LINE> <DEDENT> except ParseXmlErrorException: <NEW_LINE> <INDENT> assert False <NEW_LINE> <DEDENT> <DEDENT> def initial_element(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.is_loaded() <NEW_LINE> self.page_factory() <NEW_LINE> <DEDENT> except NoSuchWindowException(): <NEW_LINE> <INDENT> BasePage.screen_shot(self) <NEW_LINE> assert False <NEW_LINE> <DEDENT> pass <NEW_LINE> <DEDENT> def is_loaded(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def page_factory(self): <NEW_LINE> <INDENT> name_list = ['Carton', 'Next_step', 'Outer_box_code', 'Commodity_code', 'Amount', 'Units', 'Box', 'Piece', 'Mention', 'Bag', 'Determine', 'Storage_code'] <NEW_LINE> ele_dic = page_element_factory(__file__, name_list) <NEW_LINE> self.carton_locator = ele_dic['Carton'] <NEW_LINE> self.next_step_locator = ele_dic['Next_step'] <NEW_LINE> self.outer_box_code_locator = ele_dic['Outer_box_code'] <NEW_LINE> self.commoditycode_locator = ele_dic['CommodityCode'] <NEW_LINE> self.amount_locator = ele_dic['Amount'] <NEW_LINE> self.units_locator = ele_dic['Units'] <NEW_LINE> self.box_locator = ele_dic['Box'] <NEW_LINE> self.piece_locator = ele_dic['Piece'] <NEW_LINE> self.mention_locator = ele_dic['Mention'] <NEW_LINE> self.bag_locator = ele_dic['Bag'] <NEW_LINE> 
self.determine_locator = ele_dic['Determine'] <NEW_LINE> self.storage_code_locator = ele_dic['Storage_code'] <NEW_LINE> pass <NEW_LINE> <DEDENT> def click_carton(self, carton=None): <NEW_LINE> <INDENT> self.initial_element() <NEW_LINE> self.action.click(self.carton_locator).send_keys(carton) <NEW_LINE> sleep(2) <NEW_LINE> self.action.click(self.next_step_locator) <NEW_LINE> pass <NEW_LINE> <DEDENT> def click_inventory(self, outer_box_code=None, commodity_code=None, amount=None, storage_code=None): <NEW_LINE> <INDENT> self.initial_element() <NEW_LINE> self.action.click(self.outer_box_code_locator).send_keys(outer_box_code) <NEW_LINE> sleep(2) <NEW_LINE> self.action.click(self.commoditycode_locator).send_keys(commodity_code) <NEW_LINE> sleep(2) <NEW_LINE> self.action.click(self.amount_locator).send_keys(amount) <NEW_LINE> sleep(2) <NEW_LINE> self.action.click(self.storage_code_locator).send_keys(storage_code) <NEW_LINE> pass | 功能:上架详细信息页面 | 625990688e7ae83300eea845 |
class HistBinDoaneSelector(HistBinSelector): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> x = self._x <NEW_LINE> if x.size <= 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> sg1 = np.sqrt(6.0 * (x.size - 2) / ((x.size + 1.0) * (x.size + 3))) <NEW_LINE> sigma = mt.std(x) <NEW_LINE> g1 = mt.mean(((x - mt.mean(x)) / sigma)**3) <NEW_LINE> ret = _ptp(self._raw_range) / (1.0 + np.log2(x.size) + mt.log2(1.0 + mt.absolute(g1) / sg1)) <NEW_LINE> return mt.where(sigma > 0.0, ret, 0.0) <NEW_LINE> <DEDENT> def get_result(self): <NEW_LINE> <INDENT> if self._x.size <= 2: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().get_result() | Doane's histogram bin estimator.
Improved version of Sturges' formula which works better for
non-normal data. See
stats.stackexchange.com/questions/55134/doanes-formula-for-histogram-binning | 625990687d847024c075db90 |
class TestInlineResponse2XX3(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return InlineResponse2XX3( symbols = [ tradier_asyncio.models.inline_response_2_xx_3_symbols.inline_response_2XX_3_symbols( root_symbol = '0', options = [ '0' ], ) ] ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return InlineResponse2XX3( ) <NEW_LINE> <DEDENT> <DEDENT> def testInlineResponse2XX3(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | InlineResponse2XX3 unit test stubs | 625990687d43ff2487427fec |
class IPV4ReputationDownloader(DownloaderBase): <NEW_LINE> <INDENT> DOWNTIME_INDEX: str = "ipv4_reputation" <NEW_LINE> DOWNLOAD_FREQUENCY: int = 1 <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> self.destination = os.path.join( PyFunceble.storage.CONFIG_DIRECTORY, PyFunceble.storage.IPV4_REPUTATION_FILENAME, ) <NEW_LINE> self.download_link = PyFunceble.storage.IPV4_REPUTATION_DUMP_LINK <NEW_LINE> super().__init__() <NEW_LINE> <DEDENT> @property <NEW_LINE> def authorized(self) -> bool: <NEW_LINE> <INDENT> return True | Provides the downloader of our user agent file. | 625990683539df3088ecda56 |
class InvalidArgumentError(KbhffApiError): <NEW_LINE> <INDENT> pass | Raised when the passed arguments are invalid. | 625990684e4d562566373bbe |
class TrainTypesDelegate(QtWidgets.QStyledItemDelegate): <NEW_LINE> <INDENT> def createEditor(self, parent, option, index): <NEW_LINE> <INDENT> simulation = index.model().sourceModel().simulation <NEW_LINE> comboBox = QtWidgets.QComboBox(parent) <NEW_LINE> comboBox.setModel(simulation.trainTypesModel) <NEW_LINE> comboBox.setModelColumn(0) <NEW_LINE> return comboBox <NEW_LINE> <DEDENT> def setEditorData(self, editor, index): <NEW_LINE> <INDENT> simulation = index.model().sourceModel().simulation <NEW_LINE> code = index.data(Qt.EditRole) <NEW_LINE> startSearchIndex = simulation.trainTypesModel.index(0, 0) <NEW_LINE> trainTypeIndexes = simulation.trainTypesModel.match( startSearchIndex, Qt.DisplayRole, code, 1, Qt.MatchExactly | Qt.MatchWrap ) <NEW_LINE> if len(trainTypeIndexes) > 0: <NEW_LINE> <INDENT> trainTypeIndex = trainTypeIndexes[0].row() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> trainTypeIndex = 0 <NEW_LINE> <DEDENT> editor.setCurrentIndex(trainTypeIndex) <NEW_LINE> <DEDENT> def setModelData(self, editor, model, index): <NEW_LINE> <INDENT> code = editor.currentText() <NEW_LINE> model.setData(index, code, Qt.EditRole) <NEW_LINE> <DEDENT> def updateEditorGeometry(self, editor, option, index): <NEW_LINE> <INDENT> editor.setGeometry(option.rect) | TrainTypesDelegate is a delegate that provides a combo box for
selecting a TrainType. | 625990682ae34c7f260ac89f |
class TestFile: <NEW_LINE> <INDENT> def __init__(self, root_directory, filename): <NEW_LINE> <INDENT> self.source_name = filename <NEW_LINE> extensionless, _ = path.splitext(filename) <NEW_LINE> self.binary_name = extensionless + ".sept" <NEW_LINE> self.name = path.relpath(extensionless, root_directory) <NEW_LINE> <DEDENT> def run(self, distribution_root): <NEW_LINE> <INDENT> self.result = self._compile_and_execute(distribution_root) <NEW_LINE> return self.result <NEW_LINE> <DEDENT> def _compile_and_execute(self, distribution_root): <NEW_LINE> <INDENT> compiler = path.join(distribution_root, "py/sepcompiler.py") <NEW_LINE> compilation_error = subprocess.call([sys.executable, compiler, self.source_name, self.binary_name]) <NEW_LINE> if compilation_error: <NEW_LINE> <INDENT> return COMPILATION_FAILED <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> interpreter = path.join(distribution_root, "bin/09.exe") <NEW_LINE> if not path.exists(interpreter): <NEW_LINE> <INDENT> interpreter = path.join(distribution_root, "bin/09") <NEW_LINE> <DEDENT> if not path.exists(interpreter): <NEW_LINE> <INDENT> sys.stderr.write("Interpreter not found.\n") <NEW_LINE> sys.exit(2) <NEW_LINE> <DEDENT> output = subprocess.check_output([interpreter, self.binary_name]) <NEW_LINE> output = output.decode(sys.getdefaultencoding()) <NEW_LINE> output = output.replace("\r\n", "\n") <NEW_LINE> <DEDENT> except subprocess.CalledProcessError: <NEW_LINE> <INDENT> return EXECUTION_FAILED <NEW_LINE> <DEDENT> self.output = output <NEW_LINE> expected = self.expected_output() <NEW_LINE> if expected is None: <NEW_LINE> <INDENT> self.write_actual_output(output) <NEW_LINE> return FIRST_RUN <NEW_LINE> <DEDENT> elif expected != output: <NEW_LINE> <INDENT> self.write_actual_output(output) <NEW_LINE> return WRONG_OUTPUT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return OK <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def failed(self): <NEW_LINE> <INDENT> return self.result.status == TestResult.FAILURE <NEW_LINE> 
<DEDENT> def clean(self): <NEW_LINE> <INDENT> actual_output_file = path.splitext(self.source_name)[0] + ".actual" <NEW_LINE> if path.exists(self.binary_name): <NEW_LINE> <INDENT> os.unlink(self.binary_name) <NEW_LINE> <DEDENT> if path.exists(actual_output_file): <NEW_LINE> <INDENT> os.unlink(actual_output_file) <NEW_LINE> <DEDENT> <DEDENT> def bless_output(self): <NEW_LINE> <INDENT> actual_output_file = path.splitext(self.source_name)[0] + ".actual" <NEW_LINE> expected_output_file = path.splitext(self.source_name)[0] + ".expected" <NEW_LINE> if path.exists(expected_output_file): <NEW_LINE> <INDENT> os.unlink(expected_output_file) <NEW_LINE> <DEDENT> os.rename(actual_output_file, expected_output_file) <NEW_LINE> <DEDENT> def write_actual_output(self, output): <NEW_LINE> <INDENT> actual_output_file = path.splitext(self.source_name)[0] + ".actual" <NEW_LINE> with open(actual_output_file, "w") as f: <NEW_LINE> <INDENT> f.write(output) <NEW_LINE> <DEDENT> <DEDENT> def expected_output(self): <NEW_LINE> <INDENT> expected_output_file = path.splitext(self.source_name)[0] + ".expected" <NEW_LINE> if not path.exists(expected_output_file): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with open(expected_output_file, "r", encoding="utf8") as f: <NEW_LINE> <INDENT> return f.read() | Represents a single test and is responsible for running it to
conclusion. | 6259906871ff763f4b5e8f5d |
class SvNoiseNode(bpy.types.Node, SverchCustomTreeNode): <NEW_LINE> <INDENT> bl_idname = 'SvNoiseNode' <NEW_LINE> bl_label = 'Vector Noise' <NEW_LINE> bl_icon = 'OUTLINER_OB_EMPTY' <NEW_LINE> def changeMode(self, context): <NEW_LINE> <INDENT> if self.out_mode == 'SCALAR': <NEW_LINE> <INDENT> if 'Noise S' not in self.outputs: <NEW_LINE> <INDENT> self.outputs.remove(self.outputs[0]) <NEW_LINE> self.outputs.new('StringsSocket', 'Noise S', 'Noise S') <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> if self.out_mode == 'VECTOR': <NEW_LINE> <INDENT> if 'Noise V' not in self.outputs: <NEW_LINE> <INDENT> self.outputs.remove(self.outputs[0]) <NEW_LINE> self.outputs.new('VerticesSocket', 'Noise V', 'Noise V') <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> out_modes = [ ('SCALAR', 'Scalar', 'Scalar output', '', 1), ('VECTOR', 'Vector', 'Vector output', '', 2)] <NEW_LINE> out_mode = EnumProperty( items=out_modes, default='VECTOR', description='Output type', update=changeMode) <NEW_LINE> noise_type = EnumProperty( items=avail_noise, description="Noise type", update=updateNode) <NEW_LINE> noise_dict = {} <NEW_LINE> noise_f = {'SCALAR': noise.noise, 'VECTOR': noise.noise_vector} <NEW_LINE> def sv_init(self, context): <NEW_LINE> <INDENT> self.inputs.new('VerticesSocket', 'Vertices', 'Vertices') <NEW_LINE> self.outputs.new('VerticesSocket', 'Noise V', 'Noise V') <NEW_LINE> <DEDENT> def draw_buttons(self, context, layout): <NEW_LINE> <INDENT> layout.prop(self, 'out_mode', expand=True) <NEW_LINE> layout.prop(self, 'noise_type', text="Type") <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> if not self.noise_dict: <NEW_LINE> <INDENT> self.noise_dict = {t[0]: t[1] for t in inspect.getmembers(noise.types) if isinstance(t[1], int)} <NEW_LINE> <DEDENT> if not self.outputs[0].is_linked: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> verts = Vector_generate(self.inputs['Vertices'].sv_get()) <NEW_LINE> out = [] <NEW_LINE> n_t = self.noise_dict[self.noise_type] <NEW_LINE> n_f 
= self.noise_f[self.out_mode] <NEW_LINE> for obj in verts: <NEW_LINE> <INDENT> out.append([n_f(v, n_t) for v in obj]) <NEW_LINE> <DEDENT> if 'Noise V' in self.outputs: <NEW_LINE> <INDENT> self.outputs['Noise V'].sv_set(Vector_degenerate(out)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.outputs['Noise S'].sv_set(out) | Vector Noise node | 625990687c178a314d78e7c7 |
class SQLiteDataTimeStream(DataTimeStream): <NEW_LINE> <INDENT> def __init__(self, cur, query, id, labels=None, truncate=None, timeSpan=None, tz=None, TYPE='SQLite', conn=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.cur = cur <NEW_LINE> self.query = query <NEW_LINE> self.query_cur = None <NEW_LINE> self.truncate = truncate <NEW_LINE> self.tz = tz <NEW_LINE> self.labels = None <NEW_LINE> self.src_hits = 0 <NEW_LINE> self.timeSpan = timeSpan <NEW_LINE> self.TYPE = TYPE <NEW_LINE> self.conn = conn <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> if self.TYPE=='Postgres': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.cur.execute(self.query) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.conn.rollback() <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.query_cur = self.cur.execute(self.query) <NEW_LINE> <DEDENT> self.src_hits += 1 <NEW_LINE> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.TYPE=='Postgres': <NEW_LINE> <INDENT> db_data = self.cur.fetchone() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> db_data = self.query_cur.next() <NEW_LINE> <DEDENT> if not db_data: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> if self.TYPE=='Postgres': <NEW_LINE> <INDENT> db_data = self.cur.fetchone() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> db_data = self.query_cur.next() <NEW_LINE> <DEDENT> if not db_data: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> <DEDENT> DataTime_Point_or_Slot = None <NEW_LINE> if not self.timeSpan: <NEW_LINE> <INDENT> data = json.loads(db_data[5]) <NEW_LINE> DataTime_Point_or_Slot = DataTimePoint(t = db_data[0], tz = self.tz, data = data, validity_region = None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = json.loads(db_data[5]) <NEW_LINE> DataTime_Point_or_Slot = DataTimeSlot(start = TimePoint(t=db_data[0], tz = 
self.tz), end = TimePoint(t=db_data[1], tz = self.tz), span = self.timeSpan, data = data, coverage = db_data[6]) <NEW_LINE> <DEDENT> if DataTime_Point_or_Slot is None: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> return DataTime_Point_or_Slot <NEW_LINE> <DEDENT> <DEDENT> def next(self): <NEW_LINE> <INDENT> return self.__next__() <NEW_LINE> <DEDENT> def get_statistics(self): <NEW_LINE> <INDENT> return {'src_hits': self.src_hits} | Data time stream implementation in SQLite | 625990687d847024c075db91 |
class InvalidCredentialsError(exceptions.Error): <NEW_LINE> <INDENT> pass | Raised if credentials are not usable. | 62599068a8370b77170f1b7c |
class L2Normalization(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, mode, **kwargs): <NEW_LINE> <INDENT> self._mode = mode <NEW_LINE> super(L2Normalization, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> return F.L2Normalization(x, mode=self._mode, name='l2_norm') <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> s = '{name}({_mode})' <NEW_LINE> return s.format(name=self.__class__.__name__, **self.__dict__) | Applies L2 Normalization to input.
Parameters
----------
mode : str
Mode of normalization.
See :func:`~mxnet.ndarray.L2Normalization` for available choices.
Inputs:
- **data**: input tensor with arbitrary shape.
Outputs:
- **out**: output tensor with the same shape as `data`. | 625990687047854f46340b6d |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.