code: stringlengths 4 to 4.48k
docstring: stringlengths 1 to 6.45k
_id: stringlengths 24 to 24
class CSSRule(cssutils.util.Base2): <NEW_LINE> <INDENT> COMMENT = -1 <NEW_LINE> UNKNOWN_RULE = 0 <NEW_LINE> STYLE_RULE = 1 <NEW_LINE> CHARSET_RULE = 2 <NEW_LINE> IMPORT_RULE = 3 <NEW_LINE> MEDIA_RULE = 4 <NEW_LINE> FONT_FACE_RULE = 5 <NEW_LINE> PAGE_RULE = 6 <NEW_LINE> NAMESPACE_RULE = 7 <NEW_LINE> _typestrings = ['UNKNOWN_RULE', 'STYLE_RULE', 'CHARSET_RULE', 'IMPORT_RULE', 'MEDIA_RULE', 'FONT_FACE_RULE', 'PAGE_RULE', 'NAMESPACE_RULE', 'COMMENT'] <NEW_LINE> def __init__(self, parentRule=None, parentStyleSheet=None, readonly=False): <NEW_LINE> <INDENT> super(CSSRule, self).__init__() <NEW_LINE> self._parent = parentRule <NEW_LINE> self._parentRule = parentRule <NEW_LINE> self._parentStyleSheet = parentStyleSheet <NEW_LINE> self._setSeq(self._tempSeq()) <NEW_LINE> self._readonly = False <NEW_LINE> <DEDENT> def _setAtkeyword(self, akw): <NEW_LINE> <INDENT> if not self.atkeyword or (self._normalize(akw) == self._normalize(self.atkeyword)): <NEW_LINE> <INDENT> self._atkeyword = akw <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._log.error(u'%s: Invalid atkeyword for this rule: %r' % (self._normalize(self.atkeyword), akw), error=xml.dom.InvalidModificationErr) <NEW_LINE> <DEDENT> <DEDENT> atkeyword = property(lambda self: self._atkeyword, _setAtkeyword, doc=u"Literal keyword of an @rule (e.g. ``@IMport``).") <NEW_LINE> def _setCssText(self, cssText): <NEW_LINE> <INDENT> self._checkReadonly() <NEW_LINE> <DEDENT> cssText = property(lambda self: u'', _setCssText, doc="(DOM) The parsable textual representation of the " "rule. This reflects the current state of the rule " "and not its initial value.") <NEW_LINE> parent = property(lambda self: self._parent, doc="The Parent Node of this CSSRule (currently if a " "CSSStyleDeclaration only!) or None.") <NEW_LINE> parentRule = property(lambda self: self._parentRule, doc="If this rule is contained inside " "another rule (e.g. a style rule inside " "an @media block), this is the containing " "rule. If this rule is not nested inside " "any other rules, this returns None.") <NEW_LINE> parentStyleSheet = property(lambda self: self._parentStyleSheet, doc="The style sheet that contains this rule.") <NEW_LINE> type = property(lambda self: self.UNKNOWN_RULE, doc="The type of this rule, as defined by a CSSRule " "type constant.") <NEW_LINE> typeString = property(lambda self: CSSRule._typestrings[self.type], doc="Descriptive name of this rule's type.") <NEW_LINE> wellformed = property(lambda self: False, doc=u"If the rule is wellformed.")
Abstract base interface for any type of CSS statement. This includes both rule sets and at-rules. An implementation is expected to preserve all rules specified in a CSS style sheet, even if the rule is not recognized by the parser. Unrecognized rules are represented using the :class:`CSSUnknownRule` interface.
6259905999cbb53fe68324a6
class Users(base.ECMCommand): <NEW_LINE> <INDENT> name = 'users' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.add_subcommand(List, default=True) <NEW_LINE> self.add_subcommand(Create) <NEW_LINE> self.add_subcommand(Remove) <NEW_LINE> self.add_subcommand(Edit) <NEW_LINE> self.add_subcommand(Move) <NEW_LINE> self.add_subcommand(Passwd) <NEW_LINE> self.add_subcommand(Search)
Manage ECM Users.
6259905945492302aabfda9f
class LocalBuild(build_ext): <NEW_LINE> <INDENT> def initialize_options(self): <NEW_LINE> <INDENT> super().initialize_options() <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> super().finalize_options() <NEW_LINE> print(f"compiler {type(self.compiler)}") <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> super().run() <NEW_LINE> for n, v in self.compiler.__dict__.items(): <NEW_LINE> <INDENT> print(f"compiler {n}: {v}")
A builder that knows our C++ compiler.
625990593c8af77a43b68a24
class User(AbstractUser): <NEW_LINE> <INDENT> last_action = models.DateTimeField(editable=False, null=True) <NEW_LINE> def save( self, force_insert=False, force_update=False, using=None, update_fields=None ): <NEW_LINE> <INDENT> self.set_password(self.password) <NEW_LINE> super().save(force_insert, force_update, using, update_fields)
Overrides the standard user model.
62599059097d151d1a2c2634
class _ControlEventManager: <NEW_LINE> <INDENT> def __init__(self, loop: asyncio.BaseEventLoop = None, priority: CONTROL_PRIORITY_LEVEL = None): <NEW_LINE> <INDENT> self._granted_event = asyncio.Event(loop=loop) <NEW_LINE> self._lost_event = asyncio.Event(loop=loop) <NEW_LINE> self._request_event = asyncio.Event(loop=loop) <NEW_LINE> self._has_control = False <NEW_LINE> self._priority = priority <NEW_LINE> self._is_shutdown = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def granted_event(self) -> asyncio.Event: <NEW_LINE> <INDENT> return self._granted_event <NEW_LINE> <DEDENT> @property <NEW_LINE> def lost_event(self) -> asyncio.Event: <NEW_LINE> <INDENT> return self._lost_event <NEW_LINE> <DEDENT> @property <NEW_LINE> def request_event(self) -> asyncio.Event: <NEW_LINE> <INDENT> return self._request_event <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_control(self) -> bool: <NEW_LINE> <INDENT> return self._has_control <NEW_LINE> <DEDENT> @property <NEW_LINE> def priority(self) -> CONTROL_PRIORITY_LEVEL: <NEW_LINE> <INDENT> return self._priority <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_shutdown(self) -> bool: <NEW_LINE> <INDENT> return self._is_shutdown <NEW_LINE> <DEDENT> def request(self, priority: CONTROL_PRIORITY_LEVEL = CONTROL_PRIORITY_LEVEL.TOP_PRIORITY_AI) -> None: <NEW_LINE> <INDENT> if priority is None: <NEW_LINE> <INDENT> raise Exception("Must provide a priority level to request. To disable control, use :func:`release()`.") <NEW_LINE> <DEDENT> self._priority = priority <NEW_LINE> self._request_event.set() <NEW_LINE> <DEDENT> def release(self) -> None: <NEW_LINE> <INDENT> self._priority = None <NEW_LINE> self._request_event.set() <NEW_LINE> <DEDENT> def update(self, enabled: bool) -> None: <NEW_LINE> <INDENT> self._has_control = enabled <NEW_LINE> if enabled: <NEW_LINE> <INDENT> self._granted_event.set() <NEW_LINE> self._lost_event.clear() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._lost_event.set() <NEW_LINE> self._granted_event.clear() <NEW_LINE> <DEDENT> <DEDENT> def shutdown(self) -> None: <NEW_LINE> <INDENT> self._has_control = False <NEW_LINE> self._granted_event.set() <NEW_LINE> self._lost_event.set() <NEW_LINE> self._is_shutdown = True <NEW_LINE> self._request_event.set()
This manages every :class:`asyncio.Event` that handles the behavior control system. These include three events: granted, lost, and request. :class:`granted_event` represents the behavior system handing control to the SDK. :class:`lost_event` represents a higher-priority behavior taking control away from the SDK. :class:`request_event` is a way of alerting :class:`Connection` to request control.
625990590a50d4780f7068a2
class BiDirectionalRNNLayer(object): <NEW_LINE> <INDENT> def __init__(self, hidden_size, rnn_type='lstm'): <NEW_LINE> <INDENT> self.hidden_size = hidden_size <NEW_LINE> type_cast = {'lstm': LSTMCell, 'gru': GRUCell} <NEW_LINE> if rnn_type not in type_cast: <NEW_LINE> <INDENT> cell_type = GRUCell <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cell_type = type_cast[rnn_type] <NEW_LINE> <DEDENT> self.fw_cell = cell_type( num_units=self.hidden_size, state_is_tuple=True) <NEW_LINE> self.bw_cell = cell_type( num_units=self.hidden_size, state_is_tuple=True) <NEW_LINE> <DEDENT> def ops(self, input_emb, seq_length=None): <NEW_LINE> <INDENT> bi_outputs, bi_left_state = tf.nn.bidirectional_dynamic_rnn(self.fw_cell, self.bw_cell, input_emb, sequence_length=seq_length, dtype=tf.float32) <NEW_LINE> seq_encoder = tf.concat(bi_outputs, -1) <NEW_LINE> return seq_encoder
A layer class: a bi-directional LSTM/GRU RNN.
62599059e76e3b2f99fd9fc7
class LandsatConfidence(object): <NEW_LINE> <INDENT> high = 3 <NEW_LINE> medium = 2 <NEW_LINE> low = 1 <NEW_LINE> undefined = 0 <NEW_LINE> none = -1
Level of confidence that a condition exists:

high - Algorithm has high confidence that this condition exists (67-100 percent confidence).
medium - Algorithm has medium confidence that this condition exists (34-66 percent confidence).
low - Algorithm has low to no confidence that this condition exists (0-33 percent confidence).
undefined - Algorithm did not determine the status of this condition.
none - Nothing.
6259905956ac1b37e63037cb
class SingleCellSimple(fixtures.Fixture): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(SingleCellSimple, self).setUp() <NEW_LINE> self.useFixture(fixtures.MonkeyPatch( 'nova.objects.CellMappingList._get_all_from_db', self._fake_cell_list)) <NEW_LINE> self.useFixture(fixtures.MonkeyPatch( 'nova.objects.CellMapping._get_by_uuid_from_db', self._fake_cell_get)) <NEW_LINE> self.useFixture(fixtures.MonkeyPatch( 'nova.objects.HostMapping._get_by_host_from_db', self._fake_hostmapping_get)) <NEW_LINE> self.useFixture(fixtures.MonkeyPatch( 'nova.context.target_cell', self._fake_target_cell)) <NEW_LINE> <DEDENT> def _fake_hostmapping_get(self, *args): <NEW_LINE> <INDENT> return {'id': 1, 'updated_at': None, 'created_at': None, 'host': 'host1', 'cell_mapping': self._fake_cell_list()[0]} <NEW_LINE> <DEDENT> def _fake_cell_get(self, *args): <NEW_LINE> <INDENT> return self._fake_cell_list()[0] <NEW_LINE> <DEDENT> def _fake_cell_list(self, *args): <NEW_LINE> <INDENT> return [{'id': 1, 'updated_at': None, 'created_at': None, 'uuid': uuidsentinel.cell1, 'name': 'onlycell', 'transport_url': 'fake://nowhere/', 'database_connection': 'sqlite:///'}] <NEW_LINE> <DEDENT> @contextmanager <NEW_LINE> def _fake_target_cell(self, context, target_cell): <NEW_LINE> <INDENT> yield context
Set up the simplest cells environment possible. This should be used when you do not care about multiple cells, or about having a "real" environment for tests that should not care. This will give you a single cell, and map any and all accesses to that cell (even things that would go to cell0). If you need to distinguish between cell0 and cellN, then you should use the CellDatabases fixture.
625990590c0af96317c57843
class Round(Operator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Round, self).__init__() <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return singa.Round(x) <NEW_LINE> <DEDENT> def backward(self, dy): <NEW_LINE> <INDENT> dy = singa.Tensor(dy.shape(), dy.device()) <NEW_LINE> dy.SetFloatValue(0.) <NEW_LINE> return dy
Element-wise rounding of the input.
625990593cc13d1c6d466d08
class TestSpiderMiddleware1: <NEW_LINE> <INDENT> def process_request(self,request): <NEW_LINE> <INDENT> return request <NEW_LINE> <DEDENT> def process_response(self,response): <NEW_LINE> <INDENT> return response
Implements a downloader middleware.
6259905945492302aabfdaa0
class DigitalTwinsEventRoute(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'endpoint_name': {'required': True}, 'filter': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, 'filter': {'key': 'filter', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(DigitalTwinsEventRoute, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.endpoint_name = kwargs['endpoint_name'] <NEW_LINE> self.filter = kwargs['filter']
A route which directs notification and telemetry events to an endpoint. Endpoints are a destination outside of Azure Digital Twins such as an EventHub.

Variables are only populated by the server, and will be ignored when sending a request.

All required parameters must be populated in order to send to Azure.

:ivar id: The id of the event route.
:vartype id: str
:param endpoint_name: Required. The name of the endpoint this event route is bound to.
:type endpoint_name: str
:param filter: Required. An expression which describes the events which are routed to the endpoint.
:type filter: str
625990597047854f46340988
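A minimal usage sketch for the DigitalTwinsEventRoute row above, based only on the constructor shown (``endpoint_name`` and ``filter`` are pulled from ``**kwargs``, while ``id`` is left for the server to populate); the endpoint name is a hypothetical placeholder and ``"true"`` is used as a catch-all filter expression:

>>> route = DigitalTwinsEventRoute(endpoint_name="myEventHubEndpoint", filter="true")
>>> route.id is None
True
>>> route.endpoint_name
'myEventHubEndpoint'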
class UserGroupInHostGroup(models.Model): <NEW_LINE> <INDENT> usergroup = models.ForeignKey(UserGroup) <NEW_LINE> hostgroup = models.ForeignKey(HostGroup) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ("usergroup", "hostgroup") <NEW_LINE> permissions = ( ( "list_usergroups_in_hostgroups", "Can see which hostgroups are assigned to which usergroups" ), ) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "%s <=> %s" % (self.usergroup.name, self.hostgroup.name)
Assigns usergroups to hostgroups, meaning that all users in the usergroup can log in to all hosts in the hostgroup. Relates to :model:`keys.UserGroup` and :model:`keys.HostGroup`.
625990593617ad0b5ee07713
class Item(object): <NEW_LINE> <INDENT> def __init__(self, name, ptree, locations, fields): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.ptree = ptree <NEW_LINE> self.locations = locations <NEW_LINE> self.fields = fields <NEW_LINE> self.common_prefix = self._common_prefix() <NEW_LINE> self.common_suffix = self._common_suffix() <NEW_LINE> self.min_jump = self._min_jump() <NEW_LINE> <DEDENT> @property <NEW_LINE> def dict(self): <NEW_LINE> <INDENT> return {'fields': {field.name: field.dict for field in self.fields}} <NEW_LINE> <DEDENT> def _common_prefix(self): <NEW_LINE> <INDENT> return common_prefix( item_location_tags(self.ptree, location) for location in self.locations) <NEW_LINE> <DEDENT> def _common_suffix(self): <NEW_LINE> <INDENT> return common_prefix( item_location_tags(self.ptree, location) for location in self.locations) <NEW_LINE> <DEDENT> def _min_jump_location(self, location): <NEW_LINE> <INDENT> return suffix_jump( item_location_tags(self.ptree, location), self.common_suffix) <NEW_LINE> <DEDENT> def _min_jump(self): <NEW_LINE> <INDENT> return max(self._min_jump_location(location) for location in self.locations)
A collection of fields and the location of the item instances

Attributes
----------
name : string
    The item name
ptree : PageTree
locations : List[ItemLocation]
fields : List[Field]
common_prefix : List[HtmlFragment]
    Common prefix shared by all item locations
common_suffix : List[HtmlFragment]
    Common suffix shared by all item locations
min_jump : int
    Parameter for scrapely tuning
6259905901c39578d7f1421b
class Veteranx(Estudante): <NEW_LINE> <INDENT> def __init__(self, dict_json): <NEW_LINE> <INDENT> self.ciente_de = dict_json['ciente_de'] <NEW_LINE> super().__init__(Tipo.VETERANX, dict_json) <NEW_LINE> <DEDENT> def atualizar(self, dict_atualizado): <NEW_LINE> <INDENT> super().atualizar(dict_atualizado) <NEW_LINE> self.apelido = dict_atualizado['apelido'] <NEW_LINE> self.numero_ingressantes = dict_atualizado['numero_ingressantes'] <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> dict_json = super().to_dict() <NEW_LINE> dict_json['apelido'] = self.apelido <NEW_LINE> dict_json['numero_ingressantes'] = self.numero_ingressantes <NEW_LINE> dict_json['ciente_de'] = self.ciente_de <NEW_LINE> return dict_json <NEW_LINE> <DEDENT> def afilhadxs(self): <NEW_LINE> <INDENT> return Estudante.database.afilhadxs(self) <NEW_LINE> <DEDENT> def afilhar(self, id_): <NEW_LINE> <INDENT> return Estudante.database.afilhar(self, id_)
Veteranx is the Estudante structure extended with the number of incoming students it wishes to mentor (numero_ingressantes) and a nickname (apelido).
62599059adb09d7d5dc0bb33
class PackedVersionedCollectionCompactor(cronjobs.SystemCronFlow): <NEW_LINE> <INDENT> frequency = rdfvalue.Duration("5m") <NEW_LINE> lifetime = rdfvalue.Duration("40m") <NEW_LINE> @flow.StateHandler() <NEW_LINE> def Start(self): <NEW_LINE> <INDENT> processed_count = 0 <NEW_LINE> errors_count = 0 <NEW_LINE> freeze_timestamp = rdfvalue.RDFDatetime().Now() <NEW_LINE> for urn in collections.PackedVersionedCollection.QueryNotifications( timestamp=freeze_timestamp, token=self.token): <NEW_LINE> <INDENT> collections.PackedVersionedCollection.DeleteNotifications( [urn], end=freeze_timestamp, token=self.token) <NEW_LINE> self.HeartBeat() <NEW_LINE> try: <NEW_LINE> <INDENT> self.Compact(urn) <NEW_LINE> processed_count += 1 <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> self.Log("Error while processing %s", urn) <NEW_LINE> errors_count += 1 <NEW_LINE> <DEDENT> <DEDENT> self.Log("Total processed collections: %d, successful: %d, failed: %d", processed_count + errors_count, processed_count, errors_count) <NEW_LINE> <DEDENT> def Compact(self, urn): <NEW_LINE> <INDENT> fd = aff4.FACTORY.Open(urn, aff4_type="PackedVersionedCollection", mode="rw", age=aff4.ALL_TIMES, token=self.token) <NEW_LINE> num_compacted = fd.Compact(callback=self.HeartBeat) <NEW_LINE> fd.Close() <NEW_LINE> self.Log("Compacted %d items in %s", num_compacted, urn)
A Compactor which runs over all versioned collections.
62599059baa26c4b54d5086d
@dataclass <NEW_LINE> class FanSpeedDataTemplate: <NEW_LINE> <INDENT> def get_speed_config(self, resolved_data: dict[str, Any]) -> list[int] | None: <NEW_LINE> <INDENT> raise NotImplementedError
Mixin to define get_speed_config.
6259905999cbb53fe68324a8
class FCN(Network): <NEW_LINE> <INDENT> def __init__(self, nchannels=1, nlabels=2, cross_hair=False, dim=3, activation='tanh', levels=None, **kwargs): <NEW_LINE> <INDENT> if 'loading' in kwargs: <NEW_LINE> <INDENT> super(FCN, self).__init__(**kwargs) <NEW_LINE> return <NEW_LINE> <DEDENT> inputs = {'main_input': {'shape': (nchannels,) + (None,)*dim, 'dtype': 'float32'}} <NEW_LINE> layers = [] <NEW_LINE> if levels is None: <NEW_LINE> <INDENT> levels = [ {'filters': 5, 'kernel': 3}, {'filters': 10, 'kernel': 5}, {'filters': 20, 'kernel': 5}, {'filters': 50, 'kernel': 3}, ] <NEW_LINE> <DEDENT> conv = 'Conv3DCH' if cross_hair else 'Conv3D' <NEW_LINE> if dim==2: <NEW_LINE> <INDENT> conv = 'Conv2DCH' if cross_hair else 'Conv2D' <NEW_LINE> <DEDENT> curinputs = 'main_input' <NEW_LINE> cnt = 0 <NEW_LINE> for level in levels: <NEW_LINE> <INDENT> layers.append({ 'layer': conv, 'inputs': curinputs, 'sort': -cnt, 'params': { 'name': 'level_'+str(cnt), 'filters': level['filters'], 'kernel_size': (level['kernel'],)*dim, 'strides': (1,)*dim, 'padding': 'same', 'activation': activation, 'data_format': 'channels_first' } }) <NEW_LINE> curinputs = 'level_'+str(cnt) <NEW_LINE> cnt += 1 <NEW_LINE> <DEDENT> layers.append({ 'layer': 'Conv3D' if dim == 3 else 'Conv2D', 'inputs': curinputs, 'sort': -(cnt+1), 'params': { 'name': 'presoftmax', 'filters': nlabels, 'kernel_size': (1,)*dim, 'strides': (1,)*dim, 'padding': 'same', 'activation': 'linear', 'data_format': 'channels_first' } }) <NEW_LINE> layers.append({ 'layer': 'Softmax', 'inputs': 'presoftmax', 'sort': -(cnt + 2), 'params': { 'name': 'output', 'axis': 1 } }) <NEW_LINE> layers = sorted(layers, key=lambda i: i['sort'], reverse=True) <NEW_LINE> models = {'default': {'inputs': 'main_input', 'outputs': 'output'}} <NEW_LINE> kwargs['layers'] = layers <NEW_LINE> kwargs['input_shapes'] = inputs <NEW_LINE> kwargs['models'] = models <NEW_LINE> super(FCN, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def compile(self, loss=ls.categorical_crossentropy(1), optimizer='sgd', metrics=['acc'], **kwargs): <NEW_LINE> <INDENT> super(FCN, self).compile(models={'default': {'loss': loss, 'optimizer': optimizer, 'metrics': metrics}}) <NEW_LINE> <DEDENT> def fit(self, **kwargs): <NEW_LINE> <INDENT> return super(FCN, self).fit(model='default', **kwargs) <NEW_LINE> <DEDENT> def fit_generator(self, **kwargs): <NEW_LINE> <INDENT> return super(FCN, self).fit_generator(model='default', **kwargs) <NEW_LINE> <DEDENT> def predict_generator(self, **kwargs): <NEW_LINE> <INDENT> return super(FCN, self).predict_generator(model='default', **kwargs) <NEW_LINE> <DEDENT> def predict(self, **kwargs): <NEW_LINE> <INDENT> return super(FCN, self).predict(model='default', **kwargs) <NEW_LINE> <DEDENT> def evaluate(self, **kwargs): <NEW_LINE> <INDENT> return super(FCN, self).evaluate(model='default', **kwargs)
Implementation of DeepVesselNet-FCN, capable of using 3D/2D kernels with or without cross-hair filters.
6259905930dc7b76659a0d64
class DatabaseClient: <NEW_LINE> <INDENT> def __init__(self, dbpass_path, dbname): <NEW_LINE> <INDENT> self.dbpass_path = dbpass_path <NEW_LINE> self.dbname = dbname <NEW_LINE> self.db_url = self.get_db_url() <NEW_LINE> self.engine = create_engine(self.db_url) <NEW_LINE> <DEDENT> def get_db_url(self): <NEW_LINE> <INDENT> with open(self.dbpass_path, 'r') as infile: <NEW_LINE> <INDENT> db_names = [] <NEW_LINE> for raw_url in infile.read().splitlines(): <NEW_LINE> <INDENT> url_obj = make_url(raw_url) <NEW_LINE> if url_obj.database == self.dbname: <NEW_LINE> <INDENT> infile.close() <NEW_LINE> return raw_url <NEW_LINE> <DEDENT> db_names.append(url_obj.database) <NEW_LINE> <DEDENT> <DEDENT> infile.close() <NEW_LINE> exit('database name does not exist in dbpass given:' + ', '.join(db_names)) <NEW_LINE> <DEDENT> def get_df_with_query(self, query): <NEW_LINE> <INDENT> return pd.read_sql(query, self.engine) <NEW_LINE> <DEDENT> def create_df_file_with_query(self, query, output): <NEW_LINE> <INDENT> chunk_size = 100000 <NEW_LINE> offset = 0 <NEW_LINE> data = defaultdict(lambda : defaultdict(list)) <NEW_LINE> with open(output, 'wb') as outfile: <NEW_LINE> <INDENT> query = query.replace(';', '') <NEW_LINE> query += """ LIMIT {chunk_size} OFFSET {offset};""" <NEW_LINE> while True: <NEW_LINE> <INDENT> print(offset) <NEW_LINE> query = query.format( chunk_size=chunk_size, offset=offset ) <NEW_LINE> df = pd.read_sql(query, self.engine) <NEW_LINE> pickle.dump(df, outfile) <NEW_LINE> offset += chunk_size <NEW_LINE> if len(df) < chunk_size: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> outfile.close()
Takes care of opening the dbpass file to find the URL and can query the selected database.

Input:
    dbpass_path: path to the text file with the list of database URLs
    dbname: database name, so we know which database to query from the list
625990594428ac0f6e659b05
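A small usage sketch for the DatabaseClient row above; the file contents and database name are made up for illustration, and since ``create_engine`` connects lazily, instantiation alone does not need a reachable server:

>>> with open("dbpass.txt", "w") as f:                       # hypothetical dbpass file
...     _ = f.write("postgresql://bob:pw@dbhost/sales\n")
...     _ = f.write("sqlite:///local_test.db\n")
>>> client = DatabaseClient("dbpass.txt", "local_test.db")   # matched by database name
>>> client.db_url
'sqlite:///local_test.db'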
class BlockStencilMatrixGlobalBasis(BlockMatrixNode): <NEW_LINE> <INDENT> def __new__(cls, trials, tests, pads, multiplicity, expr, tag=None): <NEW_LINE> <INDENT> if not isinstance(pads, (list, tuple, Tuple)): <NEW_LINE> <INDENT> raise TypeError('Expecting an iterable') <NEW_LINE> <DEDENT> pads = Tuple(*pads) <NEW_LINE> rank = 2*len(pads) <NEW_LINE> tag = tag or random_string( 6 ) <NEW_LINE> obj = Basic.__new__(cls, pads, multiplicity, rank, tag, expr) <NEW_LINE> obj._trials = trials <NEW_LINE> obj._tests = tests <NEW_LINE> return obj <NEW_LINE> <DEDENT> @property <NEW_LINE> def pads(self): <NEW_LINE> <INDENT> return self._args[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def multiplicity(self): <NEW_LINE> <INDENT> return self._args[1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def rank(self): <NEW_LINE> <INDENT> return self._args[2] <NEW_LINE> <DEDENT> @property <NEW_LINE> def tag(self): <NEW_LINE> <INDENT> return self._args[3] <NEW_LINE> <DEDENT> @property <NEW_LINE> def expr(self): <NEW_LINE> <INDENT> return self._args[4] <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_scalar_space(self): <NEW_LINE> <INDENT> types = (H1SpaceType, L2SpaceType, UndefinedSpaceType) <NEW_LINE> spaces = self.trials.space <NEW_LINE> cond = False <NEW_LINE> for cls in types: <NEW_LINE> <INDENT> cond = cond or all(isinstance(space.kind, cls) for space in spaces) <NEW_LINE> <DEDENT> return cond
Used to describe the local degrees of freedom over an element as a block stencil matrix.
62599059097d151d1a2c2636
class NightingaleDetailView(DetailView, LoginRequiredMixin): <NEW_LINE> <INDENT> model = Admision <NEW_LINE> template_name = 'enfermeria/nightingale_detail.html' <NEW_LINE> slug_field = 'uuid' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(NightingaleDetailView, self).get_context_data(**kwargs) <NEW_LINE> context['fecha'] = timezone.now() <NEW_LINE> return context
Allows viewing the data of an :class:`Admision` from the nursing interface. This view is used together with several templates that display a variety of data such as summaries, :class:`NotaEnfermeria`s, :class:`SignoVital`es, :class:`OrdenMedica`s, and all other data directly related to an :class:`Admision`.
625990591f037a2d8b9e5350
class CampusCoursesFavorite(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> campus = models.ForeignKey(Campus) <NEW_LINE> courses = models.ManyToManyField(Course) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'feti'
Integrated model of relationships between Campus, Courses and user.
6259905907f4c71912bb0a05
class FromOptionalDependency: <NEW_LINE> <INDENT> def __init__(self, module, exception=None): <NEW_LINE> <INDENT> self._module = module <NEW_LINE> if exception is None: <NEW_LINE> <INDENT> exception = ( "The optional dependency '{}' is required for the " "functionality you tried to use.".format(self._module) ) <NEW_LINE> <DEDENT> if isinstance(exception, str): <NEW_LINE> <INDENT> exception = LewisException(exception) <NEW_LINE> <DEDENT> if not isinstance(exception, BaseException): <NEW_LINE> <INDENT> raise RuntimeError( "The exception parameter has to be either a string or a an instance of an " "exception type (derived from BaseException)." ) <NEW_LINE> <DEDENT> self._exception = exception <NEW_LINE> <DEDENT> def do_import(self, *names): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> module_object = importlib.import_module(self._module) <NEW_LINE> objects = tuple(getattr(module_object, name) for name in names) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> def failing_init(obj, *args, **kwargs): <NEW_LINE> <INDENT> raise self._exception <NEW_LINE> <DEDENT> objects = tuple( type(name, (object,), {"__init__": failing_init}) for name in names ) <NEW_LINE> <DEDENT> return objects if len(objects) != 1 else objects[0]
This is a utility class for importing classes from a module or replacing them with dummy types if the module can not be loaded.

Assume module 'a' that does:

.. sourcecode:: Python

    from b import C, D

and module 'e' which does:

.. sourcecode:: Python

    from a import F

where 'b' is a hard to install dependency which is thus optional. To still be able to do:

.. sourcecode:: Python

    import e

without raising an error, for example for inspection purposes, this class can be used as a workaround in module 'a':

.. sourcecode:: Python

    C, D = FromOptionalDependency('b').do_import('C', 'D')

which is not as pretty as the actual syntax, but at least it can be read in a similar way. If the module 'b' can not be imported, stub-types are created that are called 'C' and 'D'. Everything depending on these types will work until any of those are instantiated - in that case an exception is raised.

The exception can be controlled via the exception-parameter. If it is a string, a LewisException is constructed from it. Alternatively it can be an instance of an exception-type. If not provided, a LewisException with a standard message is constructed. If it is anything else, a RuntimeError is raised.

Essentially, this class helps defer ImportErrors until anything from the module that was attempted to load is actually used.

:param module: Module from which symbols should be imported.
:param exception: Text for LewisException or custom exception object.
62599059cb5e8a47e493cc6b
class BrewBoilerTemperature(RangeSetting): <NEW_LINE> <INDENT> address = 0x02 <NEW_LINE> range = (80, 110)
The desired temperature of the brew boiler.
625990592ae34c7f260ac6b1
class JSONSerializer(MetadataSerializer): <NEW_LINE> <INDENT> def __init__(self, compact: bool = False, validate: Optional[bool] = False): <NEW_LINE> <INDENT> self.compact = compact <NEW_LINE> self.validate = validate <NEW_LINE> <DEDENT> def serialize(self, metadata_obj: Metadata) -> bytes: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> indent = None if self.compact else 1 <NEW_LINE> separators = (",", ":") if self.compact else (",", ": ") <NEW_LINE> json_bytes = json.dumps( metadata_obj.to_dict(), indent=indent, separators=separators, sort_keys=True, ).encode("utf-8") <NEW_LINE> if self.validate: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> new_md_obj = JSONDeserializer().deserialize(json_bytes) <NEW_LINE> if metadata_obj != new_md_obj: <NEW_LINE> <INDENT> raise ValueError( "Metadata changes if you serialize and deserialize." ) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ValueError("Metadata cannot be validated!") from e <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise SerializationError from e <NEW_LINE> <DEDENT> return json_bytes
Provides a Metadata-to-JSON serialize method.

Args:
    compact: A boolean indicating if the JSON bytes generated in 'serialize' should be compact by excluding whitespace.
    validate: Check that the metadata object can be deserialized again without change of contents and thus find common mistakes. This validation might slow down serialization significantly.
625990593cc13d1c6d466d0a
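The ``compact`` flag of the JSONSerializer row above only toggles ``indent`` and ``separators``; a sketch of that whitespace behavior using plain ``json``, without constructing a full Metadata object:

>>> import json
>>> payload = {"b": 1, "a": 2}
>>> json.dumps(payload, indent=None, separators=(",", ":"), sort_keys=True)   # compact=True
'{"a":2,"b":1}'
>>> json.dumps(payload, indent=1, separators=(",", ": "), sort_keys=True)     # compact=False
'{\n "a": 2,\n "b": 1\n}'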
class MultiHeadedAttention(nn.Module): <NEW_LINE> <INDENT> def __init__(self, num_heads: int, size: int, dropout: float = 0.1): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert size % num_heads == 0 <NEW_LINE> self.head_size = head_size = size // num_heads <NEW_LINE> self.model_size = size <NEW_LINE> self.num_heads = num_heads <NEW_LINE> self.k_layer = nn.Linear(size, num_heads * head_size) <NEW_LINE> self.v_layer = nn.Linear(size, num_heads * head_size) <NEW_LINE> self.q_layer = nn.Linear(size, num_heads * head_size) <NEW_LINE> self.output_layer = nn.Linear(size, size) <NEW_LINE> self.softmax = nn.Softmax(dim=-1) <NEW_LINE> self.dropout = nn.Dropout(dropout) <NEW_LINE> <DEDENT> def forward(self, k: Tensor, v: Tensor, q: Tensor, mask: Tensor = None): <NEW_LINE> <INDENT> batch_size = k.size(0) <NEW_LINE> num_heads = self.num_heads <NEW_LINE> k = self.k_layer(k) <NEW_LINE> v = self.v_layer(v) <NEW_LINE> q = self.q_layer(q) <NEW_LINE> k = k.view(batch_size, -1, num_heads, self.head_size).transpose(1, 2) <NEW_LINE> v = v.view(batch_size, -1, num_heads, self.head_size).transpose(1, 2) <NEW_LINE> q = q.view(batch_size, -1, num_heads, self.head_size).transpose(1, 2) <NEW_LINE> q = q / math.sqrt(self.head_size) <NEW_LINE> scores = torch.matmul(q, k.transpose(2, 3)) <NEW_LINE> if mask is not None: <NEW_LINE> <INDENT> scores = scores.masked_fill(~mask.unsqueeze(1), float('-inf')) <NEW_LINE> <DEDENT> attention = self.softmax(scores) <NEW_LINE> attention = self.dropout(attention) <NEW_LINE> context = torch.matmul(attention, v) <NEW_LINE> context = context.transpose(1, 2).contiguous().view( batch_size, -1, num_heads * self.head_size) <NEW_LINE> output = self.output_layer(context) <NEW_LINE> return output
Multi-Head Attention module from "Attention is All You Need". Implementation modified from OpenNMT-py: https://github.com/OpenNMT/OpenNMT-py
625990598e7ae83300eea657
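A quick shape check for the MultiHeadedAttention row above, assuming the class is importable alongside ``torch`` (sizes are arbitrary):

>>> import torch
>>> mha = MultiHeadedAttention(num_heads=8, size=512, dropout=0.1)
>>> x = torch.rand(2, 10, 512)    # batch of 2, sequence length 10, model size 512
>>> out = mha(k=x, v=x, q=x)      # self-attention, no mask
>>> out.shape
torch.Size([2, 10, 512])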
class ProbabilityScale(ScaleBase): <NEW_LINE> <INDENT> name = 'probability' <NEW_LINE> def __init__(self, axis, dist, nonpos='mask', percentage=True, **kwargs): <NEW_LINE> <INDENT> if nonpos not in ['mask', 'clip']: <NEW_LINE> <INDENT> raise ValueError("nonposx, nonposy kwarg must be 'mask' or 'clip'") <NEW_LINE> <DEDENT> self._transform = ProbabilityTransform(dist, nonpos, **kwargs) <NEW_LINE> self.percentage = percentage <NEW_LINE> <DEDENT> def get_transform(self): <NEW_LINE> <INDENT> return self._transform <NEW_LINE> <DEDENT> def set_default_locators_and_formatters(self, axis): <NEW_LINE> <INDENT> axis.set_minor_formatter(ProbabilityFormatter(percentage=self.percentage)) <NEW_LINE> axis.set_major_formatter(ProbabilityFormatter(percentage=self.percentage)) <NEW_LINE> axis.set_major_locator(ProbabilityLocator()) <NEW_LINE> axis.set_minor_locator(ProbabilityLocator(minor=True)) <NEW_LINE> <DEDENT> def limit_range_for_scale(self, vmin, vmax, minpos): <NEW_LINE> <INDENT> return (vmin <= 0 and minpos or vmin, vmax >= 1 and (1 - minpos) or vmax)
Probability scale for data between zero and one, both excluded. Sends p -> dist.ppf(p); it maps the interval ]0, 1[ onto ]-infty, +infty[.
62599059e64d504609df9eb4
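The transform described by the ProbabilityScale row above is the distribution's percent-point function; a sketch with a standard normal ``dist``, using scipy directly and independent of the matplotlib scale machinery:

>>> from scipy.stats import norm
>>> float(round(norm.ppf(0.975), 3))   # a p in ]0, 1[ maps to a finite real value
1.96
>>> float(norm.ppf(0.5))               # the median maps to 0
0.0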
class LikeDislike(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> unique_together = (('article', 'user')) <NEW_LINE> <DEDENT> user = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> article = models.ForeignKey(Article, on_delete=models.CASCADE) <NEW_LINE> like = models.BooleanField() <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True)
Like and dislike data model
62599059460517430c432b37
class GnuLibLinker(GnuFileCompiler): <NEW_LINE> <INDENT> name = "AR" <NEW_LINE> in_type = GnuObjectFileType.get() <NEW_LINE> out_type = bkl.compilers.NativeLibFileType.get() <NEW_LINE> def commands(self, toolset, target, input, output): <NEW_LINE> <INDENT> return [ListExpr([LiteralExpr("$(AR) rcu $@"), input]), ListExpr([LiteralExpr("$(RANLIB) $@")])]
GNU library linker.
625990598e71fb1e983bd094
class BaOracleSQLContextManager: <NEW_LINE> <INDENT> def __init__(self, host="192.168.12.61", user="BI", password="CREATING2020", tnsnname="ORCL"): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.user = user <NEW_LINE> self.password = password <NEW_LINE> self.tnsnname = tnsnname <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.db_conn = cx_Oracle.connect('{user}/{password}@{host}/{tnsnname}' .format(user=self.user,password=self.password,host=self.host,tnsnname=self.tnsnname)) <NEW_LINE> self.cursor = self.db_conn.cursor() <NEW_LINE> return self.cursor <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> raise e <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if exc_type: <NEW_LINE> <INDENT> self.db_conn.rollback() <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.db_conn.commit() <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.cursor.close() <NEW_LINE> self.db_conn.close()
Oracle SQL context manager.
625990592ae34c7f260ac6b2
class IsAccountOwner(BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> return request.user == obj
Verify that the requesting user is the owner of the object.
6259905991f36d47f2231975
class CommunityCardUserForeignKeyRawIdWidget(CardUserForeignKeyRawIdWidget): <NEW_LINE> <INDENT> rel_to = CommunityCardUserProxyModel
Overrides ``django.contrib.admin.widgets.ForeignKeyRawIdWidget`` to provide a ``raw_id_fields`` option in admin interface classes for places where filtering by particular user types is needed. The widget will show a list of ``cyclos.User``s filtered to those who belong to the groups defined in the CYCLOS_CARD_USER_MEMBER_GROUPS setting.
62599059d53ae8145f919a2c
class Organizations(DSLdapObjects): <NEW_LINE> <INDENT> def __init__(self, instance, basedn): <NEW_LINE> <INDENT> super(Organizations, self).__init__(instance) <NEW_LINE> self._objectclasses = [ 'organization', ] <NEW_LINE> self._filterattrs = [RDN] <NEW_LINE> self._childobject = Organization <NEW_LINE> self._basedn = basedn
DSLdapObjects that represents Organization entries.

:param instance: An instance
:type instance: lib389.DirSrv
:param basedn: Base DN for all group entries below
:type basedn: str
6259905915baa7234946355d
class ApikeyElement: <NEW_LINE> <INDENT> key: str <NEW_LINE> type: Optional[str] <NEW_LINE> value: Any <NEW_LINE> def __init__(self, key: str, type: Optional[str], value: Any) -> None: <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.type = type <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_dict(obj: Any) -> 'ApikeyElement': <NEW_LINE> <INDENT> assert isinstance(obj, dict) <NEW_LINE> key = from_str(obj.get("key")) <NEW_LINE> type = from_union([from_str, from_none], obj.get("type")) <NEW_LINE> value = obj.get("value") <NEW_LINE> return ApikeyElement(key, type, value) <NEW_LINE> <DEDENT> def to_dict(self) -> dict: <NEW_LINE> <INDENT> result: dict = {} <NEW_LINE> result["key"] = from_str(self.key) <NEW_LINE> result["type"] = from_union([from_str, from_none], self.type) <NEW_LINE> result["value"] = self.value <NEW_LINE> return result
Represents an attribute for any authorization method provided by Postman. For example `username` and `password` are set as auth attributes for the Basic Authentication method.
6259905999cbb53fe68324aa
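A round-trip sketch for the ApikeyElement row above, assuming the module-level helpers the generated code relies on (``from_str``, ``from_union``, ``from_none``) are in scope; the key/value pair is a made-up example:

>>> raw = {"key": "X-Api-Key", "type": "string", "value": "secret"}
>>> elem = ApikeyElement.from_dict(raw)
>>> elem.key, elem.type
('X-Api-Key', 'string')
>>> elem.to_dict() == raw
True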
class Smoosher(BaseObject): <NEW_LINE> <INDENT> def __init__(self, universal=False, equal_char=False, underscore=False, hierarchy=False, opposite_pair=False, big_x=False, hardblank=False, vertical_equal_char=False, vertical_underscore=False, vertical_hierarchy=False, horizontal_line=False, vertical_line=False, unknown_char='?', **kwargs): <NEW_LINE> <INDENT> super(Smoosher, self).__init__(**kwargs) <NEW_LINE> self.universal = universal <NEW_LINE> self.equal_char = equal_char <NEW_LINE> self.underscore = underscore <NEW_LINE> self.hierarchy = hierarchy <NEW_LINE> self.opposite_pair = opposite_pair <NEW_LINE> self.big_x = big_x <NEW_LINE> self.hardblank = hardblank <NEW_LINE> self.vertical_equal_char = equal_char <NEW_LINE> self.vertical_underscore = underscore <NEW_LINE> self.vertical_hierarchy = hierarchy <NEW_LINE> self.horizontal_line = horizontal_line <NEW_LINE> self.vertical_line = vertical_line <NEW_LINE> self.unknown_char = unknown_char <NEW_LINE> self.rules = [_smoosh_spaces] <NEW_LINE> <DEDENT> def smoosh(self, left, right): <NEW_LINE> <INDENT> outchars = [] <NEW_LINE> for lc, rc in zip(left, right): <NEW_LINE> <INDENT> for rule in self.rules: <NEW_LINE> <INDENT> sc = rule(lc, rc) <NEW_LINE> if sc is not None: <NEW_LINE> <INDENT> outchars.append(sc) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ''.join(outchars)
Uses various rules to combine characters into a single character.
62599059009cb60464d02b00
class ReservableResource(BaseResource): <NEW_LINE> <INDENT> def __init__(self, name, sync, flag=None): <NEW_LINE> <INDENT> super(ReservableResource, self).__init__(name, flag=flag) <NEW_LINE> self.sync = sync
Describe a reservable resource.
62599059507cdc57c63a6370
class Invite(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'invite' <NEW_LINE> <DEDENT> invite_id = models.AutoField(primary_key=True) <NEW_LINE> created_by = models.ForeignKey('users.User', related_name='+', on_delete=models.CASCADE) <NEW_LINE> created_on = models.DateTimeField(auto_now_add=True) <NEW_LINE> report = models.ForeignKey(Report, on_delete=models.CASCADE) <NEW_LINE> user = models.ForeignKey('users.User', related_name='invites', on_delete=models.CASCADE)
An invitation to review a report. Arbitrary people can be invited (via email) to review and leave comments on a report.
62599059a17c0f6771d5d687
class ToolFeatureParameterDefinitionMapping(ImportMapping): <NEW_LINE> <INDENT> MAP_TYPE = "ToolFeatureParameterDefinition" <NEW_LINE> def _import_row(self, source_data, state, mapped_data): <NEW_LINE> <INDENT> tool_feature = state[ImportKey.TOOL_FEATURE] <NEW_LINE> parameter = str(source_data) <NEW_LINE> tool_feature.append(parameter)
Maps tool feature parameter definitions. Cannot be used as the topmost mapping; must have :class:`ToolFeatureEntityClassMapping` as parent.
625990598e71fb1e983bd095
class UpdateOrganisationResponse(object): <NEW_LINE> <INDENT> openapi_types = { 'validation_errors': 'list[str]' } <NEW_LINE> attribute_map = { 'validation_errors': 'ValidationErrors' } <NEW_LINE> def __init__(self, validation_errors=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._validation_errors = None <NEW_LINE> self.discriminator = None <NEW_LINE> if validation_errors is not None: <NEW_LINE> <INDENT> self.validation_errors = validation_errors <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def validation_errors(self): <NEW_LINE> <INDENT> return self._validation_errors <NEW_LINE> <DEDENT> @validation_errors.setter <NEW_LINE> def validation_errors(self, validation_errors): <NEW_LINE> <INDENT> self._validation_errors = validation_errors <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, UpdateOrganisationResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, UpdateOrganisationResponse): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259905916aa5153ce401aaf
class DomainHostObj(objref) : <NEW_LINE> <INDENT> id = 0 <NEW_LINE> name = ''
Object to link domain nameserver to host object
6259905945492302aabfdaa3
class ReadLengthViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = ReadLength.objects.filter(obsolete=settings.NON_OBSOLETE) <NEW_LINE> serializer_class = ReadLengthSerializer
Get the list of read lengths.
6259905921bff66bcd724230
class KonnectedSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, device_id, data, sensor_type, addr=None, initial_state=None): <NEW_LINE> <INDENT> self._addr = addr <NEW_LINE> self._data = data <NEW_LINE> self._device_id = device_id <NEW_LINE> self._type = sensor_type <NEW_LINE> self._pin_num = self._data.get(CONF_PIN) <NEW_LINE> self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] <NEW_LINE> self._unique_id = addr or "{}-{}-{}".format( device_id, self._pin_num, sensor_type ) <NEW_LINE> self._state = initial_state <NEW_LINE> if self._state: <NEW_LINE> <INDENT> self._state = round(float(self._state), 1) <NEW_LINE> <DEDENT> self._name = self._data.get(CONF_NAME) <NEW_LINE> if self._name: <NEW_LINE> <INDENT> self._name += " " + SENSOR_TYPES[sensor_type][0] <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._unit_of_measurement <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> entity_id_key = self._addr or self._type <NEW_LINE> self._data[entity_id_key] = self.entity_id <NEW_LINE> async_dispatcher_connect( self.hass, SIGNAL_SENSOR_UPDATE.format(self.entity_id), self.async_set_state ) <NEW_LINE> <DEDENT> @callback <NEW_LINE> def async_set_state(self, state): <NEW_LINE> <INDENT> if self._type == DEVICE_CLASS_HUMIDITY: <NEW_LINE> <INDENT> self._state = int(float(state)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._state = round(float(state), 1) <NEW_LINE> <DEDENT> self.async_schedule_update_ha_state()
Represents a Konnected DHT Sensor.
625990594428ac0f6e659b07
class CSVStatisticsBackend(AbstractStatisticsBackend): <NEW_LINE> <INDENT> _logger = logging.getLogger(__name__) <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2)) <NEW_LINE> <DEDENT> def write_data(self, data: Dict[str, OutputVariable]) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> output_dir = self._get_report_dir() <NEW_LINE> output_file = output_dir / "statistics.csv" <NEW_LINE> with output_file.open(mode="a") as csv_file: <NEW_LINE> <INDENT> field_names = [k for k, _ in data.items()] <NEW_LINE> csv_writer = csv.DictWriter( csv_file, fieldnames=field_names, quoting=csv.QUOTE_NONNUMERIC ) <NEW_LINE> if output_file.stat().st_size == 0: <NEW_LINE> <INDENT> csv_writer.writeheader() <NEW_LINE> <DEDENT> csv_writer.writerow({k: str(v.value) for k, v in data.items()}) <NEW_LINE> <DEDENT> <DEDENT> except OSError as error: <NEW_LINE> <INDENT> logging.warning("Error while writing statistics: %s", error) <NEW_LINE> <DEDENT> <DEDENT> def _get_report_dir(self) -> Path: <NEW_LINE> <INDENT> report_dir = Path(config.INSTANCE.report_dir).absolute() <NEW_LINE> if not report_dir.exists(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> report_dir.mkdir(parents=True, exist_ok=True) <NEW_LINE> <DEDENT> except OSError as exception: <NEW_LINE> <INDENT> msg = "Cannot create report dir %s", config.INSTANCE.report_dir <NEW_LINE> self._logger.error(msg) <NEW_LINE> raise RuntimeError(msg) from exception <NEW_LINE> <DEDENT> <DEDENT> return report_dir
A statistics backend writing all (selected) output variables to a CSV file.
6259905907f4c71912bb0a06
class ConversionError(FieldError, TypeError): <NEW_LINE> <INDENT> pass
Exception raised when data cannot be converted to the correct Python type
62599059e5267d203ee6cea6
class CommandCollection(): <NEW_LINE> <INDENT> def __init__(self, db): <NEW_LINE> <INDENT> self.db = db <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> documents = self.db.commands.find({'$or': [{'status': enums.Status.PENDING}, {'status': enums.Status.RUNNING}]}) <NEW_LINE> for document in documents: <NEW_LINE> <INDENT> self.db.commands.update_one({'_id': document['_id']}, {'$set': {'status': enums.Status.FAILED}}) <NEW_LINE> <DEDENT> <DEDENT> def delete(self, sha256_digest, scale, command, args): <NEW_LINE> <INDENT> return self.db.commands.delete_many({"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args}) <NEW_LINE> <DEDENT> def insert(self, document): <NEW_LINE> <INDENT> return self.db.commands.insert_one(document) <NEW_LINE> <DEDENT> def select(self, sha256_digest, scale, command, args): <NEW_LINE> <INDENT> return self.db.commands.find_one({"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args}) <NEW_LINE> <DEDENT> def select_many(self, sha256_digest=None, scale=None, command=None, args=None): <NEW_LINE> <INDENT> data = {"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args} <NEW_LINE> keys = [k for k, v in data.items() if v is None] <NEW_LINE> for k in keys: <NEW_LINE> <INDENT> del data[k] <NEW_LINE> <DEDENT> return self.db.commands.find(data) <NEW_LINE> <DEDENT> def select_all(self): <NEW_LINE> <INDENT> return self.db.commands.find() <NEW_LINE> <DEDENT> def update(self, sha256_digest, scale, command, args, data): <NEW_LINE> <INDENT> return self.db.commands.update_one({"sha256_digest": sha256_digest, "scale": scale, "command": command, "args": args}, {'$set': data})
Synchronous Command Collection.

Attributes:
    db (obj): The database object
625990599c8ee82313040c70
class GetDevicesRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ProductId = None <NEW_LINE> self.Offset = None <NEW_LINE> self.Length = None <NEW_LINE> self.Keyword = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ProductId = params.get("ProductId") <NEW_LINE> self.Offset = params.get("Offset") <NEW_LINE> self.Length = params.get("Length") <NEW_LINE> self.Keyword = params.get("Keyword") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
GetDevices request parameter structure.
62599059d486a94d0ba2d594
class IndexView(generic.ListView): <NEW_LINE> <INDENT> template_name = 'music/index.html' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return [1] <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(IndexView, self).get_context_data(**kwargs) <NEW_LINE> context['records'] = Record.objects.all()[:10] <NEW_LINE> context['ordered_bands'] = Band.objects.order_by('name')[:15] <NEW_LINE> return context
View for displaying home page.
6259905907d97122c4218270
class IsOwnerOrReadOnly(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.user == request.user
A permission check applied on deletion: a custom permission method used to determine whether the requesting user is the same as the user stored in the database.
62599059097d151d1a2c2638
class BugMessageAddFormView(LaunchpadFormView, BugAttachmentContentCheck): <NEW_LINE> <INDENT> schema = IBugMessageAddForm <NEW_LINE> initial_focus_widget = None <NEW_LINE> @property <NEW_LINE> def label(self): <NEW_LINE> <INDENT> return 'Add a comment or attachment to bug #%d' % self.context.bug.id <NEW_LINE> <DEDENT> @property <NEW_LINE> def page_title(self): <NEW_LINE> <INDENT> return self.label <NEW_LINE> <DEDENT> @property <NEW_LINE> def initial_values(self): <NEW_LINE> <INDENT> return dict(subject=self.context.bug.followup_subject()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def action_url(self): <NEW_LINE> <INDENT> return "%s/+addcomment" % canonical_url(self.context) <NEW_LINE> <DEDENT> def validate(self, data): <NEW_LINE> <INDENT> if len(self.errors) == 0: <NEW_LINE> <INDENT> comment = data.get('comment') or u'' <NEW_LINE> filecontent = data.get('filecontent', None) <NEW_LINE> if not comment.strip() and not filecontent: <NEW_LINE> <INDENT> self.addError("Either a comment or attachment " "must be provided.") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @action(u"Post Comment", name='save') <NEW_LINE> def save_action(self, action, data): <NEW_LINE> <INDENT> bug = self.context.bug <NEW_LINE> if data.get('email_me'): <NEW_LINE> <INDENT> bug.subscribe(self.user, self.user) <NEW_LINE> <DEDENT> file_ = self.request.form.get(self.widgets['filecontent'].name) <NEW_LINE> message = None <NEW_LINE> if data['comment'] or file_: <NEW_LINE> <INDENT> bugwatch_id = data.get('bugwatch_id') <NEW_LINE> if bugwatch_id is not None: <NEW_LINE> <INDENT> bugwatch = getUtility(IBugWatchSet).get(bugwatch_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bugwatch = None <NEW_LINE> <DEDENT> message = bug.newMessage(subject=data.get('subject'), content=data['comment'], owner=self.user, bugwatch=bugwatch) <NEW_LINE> if data['comment']: <NEW_LINE> <INDENT> self.request.response.addNotification( "Thank you for your comment.") <NEW_LINE> <DEDENT> <DEDENT> self.next_url = canonical_url(self.context) <NEW_LINE> if file_: <NEW_LINE> <INDENT> filename = file_.filename.replace('/', '-') <NEW_LINE> file_description = None <NEW_LINE> if 'attachment_description' in data: <NEW_LINE> <INDENT> file_description = data['attachment_description'] <NEW_LINE> <DEDENT> if not file_description: <NEW_LINE> <INDENT> file_description = filename <NEW_LINE> <DEDENT> patch_flag_consistent = ( self.attachmentTypeConsistentWithContentType( data['patch'], filename, data['filecontent'])) <NEW_LINE> if not patch_flag_consistent: <NEW_LINE> <INDENT> guessed_type = self.guessContentType( filename, data['filecontent']) <NEW_LINE> is_patch = guessed_type == 'text/x-diff' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> is_patch = data['patch'] <NEW_LINE> <DEDENT> attachment = bug.addAttachment( owner=self.user, data=StringIO(data['filecontent']), filename=filename, description=file_description, comment=message, is_patch=is_patch) <NEW_LINE> if not patch_flag_consistent: <NEW_LINE> <INDENT> self.next_url = self.nextUrlForInconsistentPatchFlags( attachment) <NEW_LINE> <DEDENT> self.request.response.addNotification( "Attachment %s added to bug." % filename) <NEW_LINE> <DEDENT> <DEDENT> def shouldShowEmailMeWidget(self): <NEW_LINE> <INDENT> return not self.context.bug.isSubscribed(self.user)
Browser view class for adding a bug comment/attachment.
6259905973bcbd0ca4bcb85f
class WorksHandler(webapp.RequestHandler): <NEW_LINE> <INDENT> def get(self, work_id): <NEW_LINE> <INDENT> work = models.Work.get_by_id(int(work_id)) <NEW_LINE> scenes = models.Scene.all().ancestor(work).order('act_num').order('scene_num') <NEW_LINE> path = os.path.join(os.path.dirname(__file__), 'templates', 'work.html') <NEW_LINE> self.response.out.write(template.render(path, {'work':work, 'scenes':scenes}))
A simple handler to return the TOC for a work.
62599059cb5e8a47e493cc6c
class DeleteRecordResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
DeleteRecord response parameter structure.
625990593cc13d1c6d466d0c
class ThinkGearMeditationData(ThinkGearData): <NEW_LINE> <INDENT> code = 0x05 <NEW_LINE> _strfmt = '%(value)s' <NEW_LINE> _decode = staticmethod(ord)
MEDITATION eSense (0 to 100)
6259905938b623060ffaa334
class ImageTk_Missing(Error): <NEW_LINE> <INDENT> pass
debugging aid to track calls
6259905932920d7e50bc7612
class SourceTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.new_source = Source("crypto-coins-news","Crypto Coins News","Providing breaking cryptocurrency news.", "https://www.ccn.com","technology","en","us") <NEW_LINE> <DEDENT> def test_instance(self): <NEW_LINE> <INDENT> self.assertTrue(isinstance(self.new_source,Source)) <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> self.assertEqual(self.new_source.id,"crypto-coins-news") <NEW_LINE> self.assertEqual(self.new_source.name,"Crypto Coins News") <NEW_LINE> self.assertEqual(self.new_source.description,"Providing breaking cryptocurrency news.") <NEW_LINE> self.assertEqual(self.new_source.url,"https://www.ccn.com") <NEW_LINE> self.assertEqual(self.new_source.category,"technology") <NEW_LINE> self.assertEqual(self.new_source.language,"en") <NEW_LINE> self.assertEqual(self.new_source.country,"us")
Test class to test the behaviour of the Source class.
62599059379a373c97d9a5f1
class ssStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.CreateUser = channel.unary_unary( "/aioshadowsocks.ss/CreateUser", request_serializer=aioshadowsocks__pb2.UserReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.User.FromString, ) <NEW_LINE> self.UpdateUser = channel.unary_unary( "/aioshadowsocks.ss/UpdateUser", request_serializer=aioshadowsocks__pb2.UserReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.User.FromString, ) <NEW_LINE> self.GetUser = channel.unary_unary( "/aioshadowsocks.ss/GetUser", request_serializer=aioshadowsocks__pb2.UserIdReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.User.FromString, ) <NEW_LINE> self.DeleteUser = channel.unary_unary( "/aioshadowsocks.ss/DeleteUser", request_serializer=aioshadowsocks__pb2.UserIdReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.Empty.FromString, ) <NEW_LINE> self.ListUser = channel.unary_unary( "/aioshadowsocks.ss/ListUser", request_serializer=aioshadowsocks__pb2.UserReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.UserList.FromString, ) <NEW_LINE> self.HealthCheck = channel.unary_stream( "/aioshadowsocks.ss/HealthCheck", request_serializer=aioshadowsocks__pb2.HealthCheckReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.HealthCheckRes.FromString, ) <NEW_LINE> self.FindAccessUser = channel.unary_unary( "/aioshadowsocks.ss/FindAccessUser", request_serializer=aioshadowsocks__pb2.FindAccessUserReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.User.FromString, ) <NEW_LINE> self.DecryptData = channel.unary_unary( "/aioshadowsocks.ss/DecryptData", request_serializer=aioshadowsocks__pb2.DecryptDataReq.SerializeToString, response_deserializer=aioshadowsocks__pb2.DecryptDataRes.FromString, )
service
625990594a966d76dd5f04be
class Pi(NumberSymbol, metaclass=Singleton): <NEW_LINE> <INDENT> is_real = True <NEW_LINE> is_positive = True <NEW_LINE> is_negative = False <NEW_LINE> is_irrational = True <NEW_LINE> is_number = True <NEW_LINE> is_algebraic = False <NEW_LINE> is_transcendental = True <NEW_LINE> __slots__ = () <NEW_LINE> def _latex(self, printer): <NEW_LINE> <INDENT> return r"\pi" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __abs__(): <NEW_LINE> <INDENT> return S.Pi <NEW_LINE> <DEDENT> def __int__(self): <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> def _as_mpf_val(self, prec): <NEW_LINE> <INDENT> return mpf_pi(prec) <NEW_LINE> <DEDENT> def approximation_interval(self, number_cls): <NEW_LINE> <INDENT> if issubclass(number_cls, Integer): <NEW_LINE> <INDENT> return (Integer(3), Integer(4)) <NEW_LINE> <DEDENT> elif issubclass(number_cls, Rational): <NEW_LINE> <INDENT> return (Rational(223, 71, 1), Rational(22, 7, 1))
The `\pi` constant. Explanation =========== The transcendental number `\pi = 3.141592654\ldots` represents the ratio of a circle's circumference to its diameter, the area of the unit circle, the half-period of trigonometric functions, and many other things in mathematics. Pi is a singleton, and can be accessed by ``S.Pi``, or can be imported as ``pi``. Examples ======== >>> from sympy import S, pi, oo, sin, exp, integrate, Symbol >>> S.Pi pi >>> pi > 3 True >>> pi.is_irrational True >>> x = Symbol('x') >>> sin(x + 2*pi) sin(x) >>> integrate(exp(-x**2), (x, -oo, oo)) sqrt(pi) References ========== .. [1] https://en.wikipedia.org/wiki/Pi
625990593617ad0b5ee07717
class DataDrop(BaseProtocol): <NEW_LINE> <INDENT> PORT = 8007 <NEW_LINE> def dataReceived(self, data): <NEW_LINE> <INDENT> if data.find("favico") >= 0: <NEW_LINE> <INDENT> self.write("HTTP/1.1 404 Not Found\r\n\r\n") <NEW_LINE> self.transport.loseConnection() <NEW_LINE> return <NEW_LINE> <DEDENT> BaseProtocol.dataReceived(self, data) <NEW_LINE> self.write("HTTP/1.1 200 OK\r\n") <NEW_LINE> self.write("\r\n") <NEW_LINE> self.transport.loseConnection()
Drop connection in body
6259905991f36d47f2231976
class rv_discrete_float(object): <NEW_LINE> <INDENT> def __init__(self, xk, pk): <NEW_LINE> <INDENT> self.xk = xk <NEW_LINE> self.pk = pk <NEW_LINE> self.cpk = np.cumsum(self.pk, axis=1) <NEW_LINE> <DEDENT> def rvs(self, n=None): <NEW_LINE> <INDENT> n = self.xk.shape[0] <NEW_LINE> u = np.random.uniform(size=n) <NEW_LINE> ix = (self.cpk < u[:, None]).sum(1) <NEW_LINE> ii = np.arange(n, dtype=np.int32) <NEW_LINE> return self.xk[(ii,ix)] <NEW_LINE> <DEDENT> def mean(self): <NEW_LINE> <INDENT> return (self.xk * self.pk).sum(1) <NEW_LINE> <DEDENT> def var(self): <NEW_LINE> <INDENT> mn = self.mean() <NEW_LINE> xkc = self.xk - mn[:, None] <NEW_LINE> return (self.pk * (self.xk - xkc)**2).sum(1) <NEW_LINE> <DEDENT> def std(self): <NEW_LINE> <INDENT> return np.sqrt(self.var())
A class representing a collection of discrete distributions. Parameters ---------- xk : 2d array_like The support points, should be non-decreasing within each row. pk : 2d array_like The probabilities, should sum to one within each row. Notes ----- Each row of `xk`, and the corresponding row of `pk` describe a discrete distribution. `xk` and `pk` should both be two-dimensional ndarrays. Each row of `pk` should sum to 1. This class is used as a substitute for scipy.distributions. rv_discrete, since that class does not allow non-integer support points, or vectorized operations. Only a limited number of methods are implemented here compared to the other scipy distribution classes.
62599059baa26c4b54d50871
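A minimal NumPy sketch of the row-wise sampling that the rv_discrete_float docstring above describes, using made-up support points and probabilities rather than anything from the dataset (it assumes only that numpy is installed):

import numpy as np

# each row of xk/pk is one discrete distribution; every pk row sums to 1
xk = np.array([[0.0, 1.5, 3.0],
               [2.0, 4.0, 6.0]])
pk = np.array([[0.2, 0.5, 0.3],
               [0.6, 0.3, 0.1]])

cpk = np.cumsum(pk, axis=1)                  # row-wise cumulative probabilities
u = np.random.uniform(size=xk.shape[0])      # one uniform draw per row
ix = (cpk < u[:, None]).sum(1)               # index of the sampled support point in each row
samples = xk[np.arange(xk.shape[0]), ix]     # one sample per distribution, as rvs() does
means = (xk * pk).sum(1)                     # row-wise means, matching mean()
print(samples, means)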
class _LastTxnTime(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._lock = threading.Lock() <NEW_LINE> self._time = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def time(self): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> return self._time <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def request_header(self): <NEW_LINE> <INDENT> t = self.time <NEW_LINE> if t is None: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> return {"X-Last-Txn-Time": str(t)} <NEW_LINE> <DEDENT> def update_txn_time(self, new_txn_time): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> if self._time is None: <NEW_LINE> <INDENT> self._time = new_txn_time <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._time = max(self._time, new_txn_time)
Wraps tracking the last transaction time supplied from the database.
625990590fa83653e46f64b2
class s(object): <NEW_LINE> <INDENT> def __init__(self,dictionary): <NEW_LINE> <INDENT> for key, value in dictionary.items(): <NEW_LINE> <INDENT> setattr(self, key, value)
class with symbol attributes
62599059adb09d7d5dc0bb37
class UpdateFileMemberArgs(ChangeFileMemberAccessArgs): <NEW_LINE> <INDENT> __slots__ = [ ] <NEW_LINE> _has_required_fields = True <NEW_LINE> def __init__(self, file=None, member=None, access_level=None): <NEW_LINE> <INDENT> super(UpdateFileMemberArgs, self).__init__(file, member, access_level) <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, field_path, processor): <NEW_LINE> <INDENT> super(UpdateFileMemberArgs, self)._process_custom_annotations(annotation_type, field_path, processor)
Arguments for :meth:`dropbox.dropbox_client.Dropbox.sharing_update_file_member`.
6259905999cbb53fe68324ac
class UserDataCreate(generic.CreateView): <NEW_LINE> <INDENT> form_class = UserCreateForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> year = self.kwargs.get('year') <NEW_LINE> month = self.kwargs.get('month') <NEW_LINE> day = self.kwargs.get('day') <NEW_LINE> hour = self.kwargs.get('hour') <NEW_LINE> start = datetime.datetime(year=year, month=month, day=day, hour=hour) <NEW_LINE> end = datetime.datetime(year=year, month=month, day=day, hour=hour + 1) <NEW_LINE> if Schedule.objects.filter(start=start).exists(): messages.error(self.request, 'すみません、入れ違いで予約がありました。別の日時はどうですか。') <NEW_LINE> else: <NEW_LINE> <INDENT> schedule = form.save(commit=False) <NEW_LINE> schedule.start = start <NEW_LINE> schedule.end = end <NEW_LINE> schedule.save() <NEW_LINE> <DEDENT> return render(self.request, 'booking/last.html', {'form': form, 'year':year, 'month':month, 'day':day, 'hour':hour}) <NEW_LINE> <DEDENT> success_url = reverse_lazy('booking:calendar') <NEW_LINE> def form_invalid(self, form): <NEW_LINE> <INDENT> return render(self.request, 'booking/user_data_input.html', {'form': form})
Registration view for user data. Do not use CreateView anywhere other than here.
6259905991af0d3eaad3b3f5
class RepoConfederaciones(object): <NEW_LINE> <INDENT> def __init__(self, configuracion, cliente_mongo): <NEW_LINE> <INDENT> self.cfg = configuracion <NEW_LINE> self.bd_mongo = cliente_mongo <NEW_LINE> <DEDENT> def obtener_confederacion(self, codigo_confederacion): <NEW_LINE> <INDENT> return self.bd_mongo.confederaciones.find_one({'codigo': codigo_confederacion}) <NEW_LINE> <DEDENT> def obtener_confederaciones(self): <NEW_LINE> <INDENT> return list(self.bd_mongo.confederaciones.find({})) <NEW_LINE> <DEDENT> def obtener_proveedor_saih(self, confederacion): <NEW_LINE> <INDENT> chc = CHCantabrico(self.cfg) <NEW_LINE> chsil = CHSil() <NEW_LINE> chebro = CHEbro() <NEW_LINE> if confederacion['alias'] == chc.tipo_proveedor(): <NEW_LINE> <INDENT> return chc <NEW_LINE> <DEDENT> if confederacion['alias'] == chsil.tipo_proveedor(): <NEW_LINE> <INDENT> return chsil <NEW_LINE> <DEDENT> if confederacion['alias'] == chebro.tipo_proveedor(): <NEW_LINE> <INDENT> return chebro <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def obtener_proveedor_boyas(self): <NEW_LINE> <INDENT> boyas_puertos = BoyasPuertos() <NEW_LINE> return boyas_puertos
Class for managing the querying and insertion of Confederaciones data
62599059627d3e7fe0e08459
class ExposeSensor(Device): <NEW_LINE> <INDENT> def __init__(self, xknx, name, group_address=None, value_type=None, device_updated_cb=None): <NEW_LINE> <INDENT> super().__init__(xknx, name, device_updated_cb) <NEW_LINE> self.sensor_value = None <NEW_LINE> if value_type == "binary": <NEW_LINE> <INDENT> self.sensor_value = RemoteValueSwitch( xknx, group_address=group_address, device_name=self.name, after_update_cb=self.after_update) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sensor_value = RemoteValueSensor( xknx, group_address=group_address, device_name=self.name, after_update_cb=self.after_update, value_type=value_type) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_config(cls, xknx, name, config): <NEW_LINE> <INDENT> group_address = config.get('group_address') <NEW_LINE> value_type = config.get('value_type') <NEW_LINE> return cls(xknx, name, group_address=group_address, value_type=value_type) <NEW_LINE> <DEDENT> def has_group_address(self, group_address): <NEW_LINE> <INDENT> return self.sensor_value.has_group_address(group_address) <NEW_LINE> <DEDENT> def state_addresses(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> async def process_group_read(self, telegram): <NEW_LINE> <INDENT> await self.sensor_value.send(response=True) <NEW_LINE> <DEDENT> async def set(self, value): <NEW_LINE> <INDENT> await self.sensor_value.set(value) <NEW_LINE> <DEDENT> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self.sensor_value.unit_of_measurement <NEW_LINE> <DEDENT> def resolve_state(self): <NEW_LINE> <INDENT> return self.sensor_value.value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<ExposeSensor name="{0}" ' 'sensor="{1}" value="{2}" unit="{3}"/>' .format(self.name, self.sensor_value.group_addr_str(), self.resolve_state(), self.unit_of_measurement()) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__
Class for managing a sensor.
625990598e71fb1e983bd097
class AlphaBetaPlayer(IsolationPlayer): <NEW_LINE> <INDENT> def get_move(self, game, time_left): <NEW_LINE> <INDENT> self.time_left = time_left <NEW_LINE> best_move = (-1,-1) <NEW_LINE> try: <NEW_LINE> <INDENT> depth = 1 <NEW_LINE> while True: <NEW_LINE> <INDENT> best_move = self.alphabeta(game, depth) <NEW_LINE> depth += 1 <NEW_LINE> <DEDENT> <DEDENT> except SearchTimeout: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return best_move <NEW_LINE> <DEDENT> def alphabeta(self, game, depth, alpha=float("-inf"), beta=float("inf")): <NEW_LINE> <INDENT> if self.terminal_test(game): <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> best_score = float("-inf") <NEW_LINE> best_move = game.get_legal_moves()[0] <NEW_LINE> for m in game.get_legal_moves(): <NEW_LINE> <INDENT> v = self.min_value(game.forecast_move(m), depth - 1, alpha, beta) <NEW_LINE> if v > best_score: <NEW_LINE> <INDENT> best_score = v <NEW_LINE> best_move = m <NEW_LINE> <DEDENT> alpha = max(alpha, v) <NEW_LINE> <DEDENT> return best_move <NEW_LINE> <DEDENT> def terminal_test(self, game): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> return not bool(game.get_legal_moves()) <NEW_LINE> <DEDENT> def min_value(self, game, depth, alpha, beta): <NEW_LINE> <INDENT> if self.terminal_test(game): <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> v = float("inf") <NEW_LINE> if depth <= 0: <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> for m in game.get_legal_moves(): <NEW_LINE> <INDENT> v = min(v, self.max_value(game.forecast_move(m), depth - 1, alpha, beta)) <NEW_LINE> if v <= alpha: <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> beta = min(beta, v) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> def max_value(self, game, depth, alpha, beta): <NEW_LINE> <INDENT> if self.terminal_test(game): <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> v = float("-inf") <NEW_LINE> if depth <= 0: <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> for m in game.get_legal_moves(): <NEW_LINE> <INDENT> v = max(v, self.min_value(game.forecast_move(m), depth - 1, alpha, beta)) <NEW_LINE> if v >= beta: <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> alpha = max(alpha, v) <NEW_LINE> <DEDENT> return v
Game-playing agent that chooses a move using iterative deepening minimax search with alpha-beta pruning. You must finish and test this player to make sure it returns a good move before the search time limit expires.
62599059a219f33f346c7dd2
class ActiveCampaignManager(models.Manager): <NEW_LINE> <INDENT> def get_query_set(self): <NEW_LINE> <INDENT> return ActiveCampaigns.objects.all()
Manager that filters Campaigns by Active status
6259905916aa5153ce401ab1
class TimeStamp_db_mixin(object): <NEW_LINE> <INDENT> ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' <NEW_LINE> def _change_since_result_filter_hook(self, query, filters): <NEW_LINE> <INDENT> values = filters and filters.get('changed_since', []) <NEW_LINE> if not values: <NEW_LINE> <INDENT> return query <NEW_LINE> <DEDENT> data = filters['changed_since'][0] <NEW_LINE> try: <NEW_LINE> <INDENT> datetime.datetime.fromtimestamp(time.mktime( time.strptime(data, self.ISO8601_TIME_FORMAT))) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> msg = _LW("The input changed_since must be in the " "following format: YYYY-MM-DDTHH:MM:SS") <NEW_LINE> raise n_exc.InvalidInput(error_message=msg) <NEW_LINE> <DEDENT> changed_since_string = timeutils.parse_isotime(data) <NEW_LINE> changed_since = (timeutils. normalize_time(changed_since_string)) <NEW_LINE> target_model_class = list(query._mapper_adapter_map.keys())[0] <NEW_LINE> query = query.join(model_base.StandardAttribute, target_model_class.standard_attr_id == model_base.StandardAttribute.id).filter( model_base.StandardAttribute.updated_at >= changed_since) <NEW_LINE> return query <NEW_LINE> <DEDENT> def update_timestamp(self, session, context, instances): <NEW_LINE> <INDENT> objs_list = session.new.union(session.dirty) <NEW_LINE> while objs_list: <NEW_LINE> <INDENT> obj = objs_list.pop() <NEW_LINE> if (isinstance(obj, model_base.HasStandardAttributes) and obj.standard_attr_id): <NEW_LINE> <INDENT> obj.updated_at = timeutils.utcnow() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def register_db_events(self): <NEW_LINE> <INDENT> event.listen(model_base.StandardAttribute, 'before_insert', self._add_timestamp) <NEW_LINE> event.listen(se.Session, 'before_flush', self.update_timestamp) <NEW_LINE> <DEDENT> def unregister_db_events(self): <NEW_LINE> <INDENT> self._unregister_db_event(model_base.StandardAttribute, 'before_insert', self._add_timestamp) <NEW_LINE> self._unregister_db_event(se.Session, 'before_flush', self.update_timestamp) <NEW_LINE> <DEDENT> def _unregister_db_event(self, listen_obj, listened_event, listen_hander): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> event.remove(listen_obj, listened_event, listen_hander) <NEW_LINE> <DEDENT> except sql_exc.InvalidRequestError: <NEW_LINE> <INDENT> LOG.warning(_LW("No sqlalchemy event for resource %s found"), listen_obj) <NEW_LINE> <DEDENT> <DEDENT> def _format_timestamp(self, resource_db, result): <NEW_LINE> <INDENT> result['created_at'] = (resource_db.created_at. strftime(self.ISO8601_TIME_FORMAT)) <NEW_LINE> result['updated_at'] = (resource_db.updated_at. strftime(self.ISO8601_TIME_FORMAT)) <NEW_LINE> <DEDENT> def extend_resource_dict_timestamp(self, plugin_obj, resource_res, resource_db): <NEW_LINE> <INDENT> if (resource_db and resource_db.created_at and resource_db.updated_at): <NEW_LINE> <INDENT> self._format_timestamp(resource_db, resource_res) <NEW_LINE> <DEDENT> <DEDENT> def _add_timestamp(self, mapper, _conn, target): <NEW_LINE> <INDENT> if not target.created_at and not target.updated_at: <NEW_LINE> <INDENT> time = timeutils.utcnow() <NEW_LINE> for field in ['created_at', 'updated_at']: <NEW_LINE> <INDENT> setattr(target, field, time) <NEW_LINE> <DEDENT> <DEDENT> return target
Mixin class to add Time Stamp methods.
625990598da39b475be047b3
class RecognitionJobs(object): <NEW_LINE> <INDENT> def __init__(self, recognitions): <NEW_LINE> <INDENT> self.recognitions = recognitions <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'recognitions' in _dict: <NEW_LINE> <INDENT> args['recognitions'] = [ RecognitionJob._from_dict(x) for x in (_dict.get('recognitions')) ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'recognitions\' not present in RecognitionJobs JSON' ) <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'recognitions') and self.recognitions is not None: <NEW_LINE> <INDENT> _dict['recognitions'] = [x._to_dict() for x in self.recognitions] <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
RecognitionJobs. :attr list[RecognitionJob] recognitions: An array of objects that provides the status for each of the user's current jobs. The array is empty if the user has no current jobs.
62599059d486a94d0ba2d596
class BertForTokenClassification(BaseModel): <NEW_LINE> <INDENT> r <NEW_LINE> def __init__(self, embed: BertEmbedding, num_labels, dropout=0.1): <NEW_LINE> <INDENT> super(BertForTokenClassification, self).__init__() <NEW_LINE> self.num_labels = num_labels <NEW_LINE> self.bert = embed <NEW_LINE> self.dropout = nn.Dropout(p=dropout) <NEW_LINE> self.classifier = nn.Linear(self.bert.embedding_dim, num_labels) <NEW_LINE> if self.bert.model.include_cls_sep: <NEW_LINE> <INDENT> self.bert.model.include_cls_sep = False <NEW_LINE> warn_msg = "Bert for token classification excepts BertEmbedding `include_cls_sep` False, " "but got True. FastNLP has changed it to False." <NEW_LINE> logger.warning(warn_msg) <NEW_LINE> warnings.warn(warn_msg) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, words): <NEW_LINE> <INDENT> sequence_output = self.bert(words) <NEW_LINE> sequence_output = self.dropout(sequence_output) <NEW_LINE> logits = self.classifier(sequence_output) <NEW_LINE> return {Const.OUTPUT: logits} <NEW_LINE> <DEDENT> def predict(self, words): <NEW_LINE> <INDENT> logits = self.forward(words)[Const.OUTPUT] <NEW_LINE> return {Const.OUTPUT: torch.argmax(logits, dim=-1)}
BERT model for token classification.
6259905963d6d428bbee3d6e
class IxnOspfRouter(IxnProtocolRouter): <NEW_LINE> <INDENT> objType = 'router'
Represents IXN OSPF router.
62599059f7d966606f74939f
class Packetizable: <NEW_LINE> <INDENT> def packet_key(self): <NEW_LINE> <INDENT> pass
Implementing classes declare that they can be packetized and provide an identifying key
62599059379a373c97d9a5f2
class FilterAction(BaseAction): <NEW_LINE> <INDENT> name = "filter" <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(FilterAction, self).__init__(**kwargs) <NEW_LINE> self.method = kwargs.get('method', "POST") <NEW_LINE> self.name = kwargs.get('name', self.name) <NEW_LINE> self.verbose_name = kwargs.get('verbose_name', _("Filter")) <NEW_LINE> self.filter_type = kwargs.get('filter_type', "query") <NEW_LINE> self.needs_preloading = kwargs.get('needs_preloading', False) <NEW_LINE> self.param_name = kwargs.get('param_name', 'q') <NEW_LINE> <DEDENT> def get_param_name(self): <NEW_LINE> <INDENT> return "__".join([self.table.name, self.name, self.param_name]) <NEW_LINE> <DEDENT> def get_default_classes(self): <NEW_LINE> <INDENT> classes = super(FilterAction, self).get_default_classes() <NEW_LINE> classes += ("btn-search",) <NEW_LINE> return classes <NEW_LINE> <DEDENT> def assign_type_string(self, table, data, type_string): <NEW_LINE> <INDENT> for datum in data: <NEW_LINE> <INDENT> setattr(datum, table._meta.data_type_name, type_string) <NEW_LINE> <DEDENT> <DEDENT> def data_type_filter(self, table, data, filter_string): <NEW_LINE> <INDENT> filtered_data = [] <NEW_LINE> for data_type in table._meta.data_types: <NEW_LINE> <INDENT> func_name = "filter_%s_data" % data_type <NEW_LINE> filter_func = getattr(self, func_name, None) <NEW_LINE> if not filter_func and not callable(filter_func): <NEW_LINE> <INDENT> cls_name = self.__class__.__name__ <NEW_LINE> raise NotImplementedError("You must define a %s method " "for %s data type in %s." % (func_name, data_type, cls_name)) <NEW_LINE> <DEDENT> _data = filter_func(table, data, filter_string) <NEW_LINE> self.assign_type_string(table, _data, data_type) <NEW_LINE> filtered_data.extend(_data) <NEW_LINE> <DEDENT> return filtered_data <NEW_LINE> <DEDENT> def filter(self, table, data, filter_string): <NEW_LINE> <INDENT> raise NotImplementedError("The filter method has not been " "implemented by %s." % self.__class__)
A base class representing a filter action for a table. .. attribute:: name The short name or "slug" representing this action. Defaults to ``"filter"``. .. attribute:: verbose_name A descriptive name used for display purposes. Defaults to the value of ``name`` with the first letter of each word capitalized. .. attribute:: param_name A string representing the name of the request parameter used for the search term. Default: ``"q"``. .. attribute: filter_type A string representing the type of this filter. Default: ``"query"``. .. attribute: needs_preloading If True, the filter function will be called for the initial GET request with an empty ``filter_string``, regardless of the value of ``method``.
6259905963b5f9789fe86740
class SutraTextField(models.TextField): <NEW_LINE> <INDENT> description = '存储经文内容,换行用\n,每页前有换页标记p\n' <NEW_LINE> def get_prep_value(self, value): <NEW_LINE> <INDENT> value = value.replace('\r\n', '\n') <NEW_LINE> value = super().get_prep_value(value) <NEW_LINE> return self.to_python(value)
Format notes: stores sutra text content, using \n for line breaks, with a page-break marker p\n before each page. On reading, raw \r\n in the input is normalized to \n.
62599059e64d504609df9eb6
class DirichletParameter(Parameter): <NEW_LINE> <INDENT> def __init__( self, k: int = 2, shape: Union[int, List[int]] = [], posterior=Dirichlet, prior=None, transform=None, initializer={"concentration": pos_xavier}, var_transform={"concentration": O.softplus}, name="DirichletParameter", ): <NEW_LINE> <INDENT> if not isinstance(k, int): <NEW_LINE> <INDENT> raise TypeError("k must be an integer") <NEW_LINE> <DEDENT> if k < 2: <NEW_LINE> <INDENT> raise ValueError("k must be >1") <NEW_LINE> <DEDENT> if isinstance(shape, int): <NEW_LINE> <INDENT> shape = [shape] <NEW_LINE> <DEDENT> shape = shape + [k] <NEW_LINE> if prior is None: <NEW_LINE> <INDENT> prior = Dirichlet(O.ones(shape)) <NEW_LINE> <DEDENT> super().__init__( shape=shape, posterior=posterior, prior=prior, transform=transform, initializer=initializer, var_transform=var_transform, name=name, )
Dirichlet parameter. This is a convenience class for creating a parameter :math:`\theta` with a Dirichlet posterior: .. math:: \theta \sim \text{Dirichlet}(\mathbf{\alpha}) By default, a uniform Dirichlet prior is used: .. math:: \theta \sim \text{Dirichlet}_K(\mathbf{1}/K) TODO: explain that a sample is a categorical prob dist (as compared to CategoricalParameter, where a sample is a single value) Parameters ---------- k : int > 2 Number of categories. shape : int or List[int] Shape of the array containing the parameters. Default = ``1`` posterior : |Distribution| class Probability distribution class to use to approximate the posterior. Default = :class:`.Dirichlet` prior : |Distribution| object Prior probability distribution function which has been instantiated with parameters. Default = :class:`.Dirichlet` ``(1)`` transform : callable Transform to apply to the random variable. Default is to use no transform. initializer : Dict[str, callable] Initializer functions to use for each variable of the variational posterior distribution. Keys correspond to variable names (arguments to the distribution), and values contain functions to initialize those variables given ``shape`` as the single argument. var_transform : Dict[str, callable] Transform to apply to each variable of the variational posterior. name : str Name of the parameter(s). Default = ``'DirichletParameter'`` Examples -------- TODO: creating variable
62599059dd821e528d6da467
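A plain NumPy sketch of the point the DirichletParameter docstring above makes — a single Dirichlet draw is itself a categorical probability vector — with an assumed K = 3 and a concentration of all ones, matching the default prior in the code above (this illustration does not use the classes from the entry itself):

import numpy as np

alpha = np.ones(3)                  # uniform concentration over K = 3 categories
theta = np.random.dirichlet(alpha)  # one draw: a length-3 probability vector
print(theta, theta.sum())           # entries are non-negative and sum to 1.0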
class SNMPException(Exception): <NEW_LINE> <INDENT> pass
SNMP related base exception. All SNMP exceptions are inherited from this one. The inherited exceptions are named after the name of the corresponding SNMP error.
625990593539df3088ecd86a
class Or(Filter): <NEW_LINE> <INDENT> def __init__(self, *filters): <NEW_LINE> <INDENT> self._filters = filters <NEW_LINE> <DEDENT> def __call__(self, event): <NEW_LINE> <INDENT> return any(f(event) for f in self._filters)
At least one underlying filter must return `True` for this filter to be `True`.
625990594e4d5625663739d5
class FbuserAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('username', 'email')
[Admin] User
6259905955399d3f05627aee
class IsActive(BasePermission): <NEW_LINE> <INDENT> message = _("The account is deactivated!") <NEW_LINE> def has_permission(self, request, view): <NEW_LINE> <INDENT> return request.user and request.user.is_active
Allows access only to active users.
625990593539df3088ecd86b
class KnownHostsStore(BaseJSONStore): <NEW_LINE> <INDENT> def __init__(self, filename=None): <NEW_LINE> <INDENT> if filename is None: <NEW_LINE> <INDENT> filename = os.path.join(get_home(), 'known_hosts') <NEW_LINE> <DEDENT> super(KnownHostsStore, self).__init__(filename) <NEW_LINE> <DEDENT> def add(self, addr, server_key): <NEW_LINE> <INDENT> self.update({self._parse_addr(addr): server_key}) <NEW_LINE> <DEDENT> def serverkey(self, addr): <NEW_LINE> <INDENT> return self.load().get(self._parse_addr(addr), None) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_addr(addr): <NEW_LINE> <INDENT> url = urlparse.urlparse(addr) <NEW_LINE> if url.netloc: <NEW_LINE> <INDENT> return url.netloc <NEW_LINE> <DEDENT> return url.path
Handle storage and retrieval of known hosts
6259905923849d37ff852694
class LinuxChromeOSPlatform(cr.Platform): <NEW_LINE> <INDENT> ACTIVE = cr.Config.From( CR_BINARY=os.path.join('{CR_BUILD_DIR}', '{CR_BUILD_TARGET}'), CHROME_DEVEL_SANDBOX='/usr/local/sbin/chrome-devel-sandbox', GN_ARG_target_os='"chromeos"', ) <NEW_LINE> @property <NEW_LINE> def enabled(self): <NEW_LINE> <INDENT> return cr.Platform.System() == 'Linux' <NEW_LINE> <DEDENT> @property <NEW_LINE> def priority(self): <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def paths(self): <NEW_LINE> <INDENT> return ['{GOMA_DIR}']
Platform for Linux Chrome OS target
625990597047854f4634098e
@dataclass(frozen=True) <NEW_LINE> class Dataset: <NEW_LINE> <INDENT> url: URL <NEW_LINE> name: str <NEW_LINE> key_attribute_names: Tuple[str, ...] <NEW_LINE> description: Optional[str] = None
A Tamr dataset See https://docs.tamr.com/reference/dataset-models Args: url: The canonical dataset-based URL for this dataset e.g. `/datasets/1` name key_attribute_names description
625990590fa83653e46f64b4
class CreateSecurityRulesResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RuleIdList = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RuleIdList = params.get("RuleIdList") <NEW_LINE> self.RequestId = params.get("RequestId")
CreateSecurityRules response parameter structure
625990592ae34c7f260ac6b6
class HeaderList(FileList): <NEW_LINE> <INDENT> include_regex = re.compile(r'(.*?)(\binclude\b)(.*)') <NEW_LINE> def __init__(self, files): <NEW_LINE> <INDENT> super(HeaderList, self).__init__(files) <NEW_LINE> self._macro_definitions = [] <NEW_LINE> self._directories = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def directories(self): <NEW_LINE> <INDENT> values = self._directories <NEW_LINE> if values is None: <NEW_LINE> <INDENT> values = self._default_directories() <NEW_LINE> <DEDENT> return list(dedupe(values)) <NEW_LINE> <DEDENT> @directories.setter <NEW_LINE> def directories(self, value): <NEW_LINE> <INDENT> value = value or [] <NEW_LINE> if isinstance(value, six.string_types): <NEW_LINE> <INDENT> value = [value] <NEW_LINE> <DEDENT> self._directories = [os.path.normpath(x) for x in value] <NEW_LINE> <DEDENT> def _default_directories(self): <NEW_LINE> <INDENT> dir_list = super(HeaderList, self).directories <NEW_LINE> values = [] <NEW_LINE> for d in dir_list: <NEW_LINE> <INDENT> m = self.include_regex.match(d) <NEW_LINE> value = os.path.join(*m.group(1, 2)) if m else d <NEW_LINE> values.append(value) <NEW_LINE> <DEDENT> return values <NEW_LINE> <DEDENT> @property <NEW_LINE> def headers(self): <NEW_LINE> <INDENT> return self.files <NEW_LINE> <DEDENT> @property <NEW_LINE> def names(self): <NEW_LINE> <INDENT> names = [] <NEW_LINE> for x in self.basenames: <NEW_LINE> <INDENT> name = x <NEW_LINE> for ext in ['.cuh', '.hpp', '.hh', '.h']: <NEW_LINE> <INDENT> i = name.rfind(ext) <NEW_LINE> if i != -1: <NEW_LINE> <INDENT> names.append(name[:i]) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> names.append(name) <NEW_LINE> <DEDENT> <DEDENT> return list(dedupe(names)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def include_flags(self): <NEW_LINE> <INDENT> return ' '.join(['-I' + x for x in self.directories]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def macro_definitions(self): <NEW_LINE> <INDENT> return ' '.join(self._macro_definitions) <NEW_LINE> <DEDENT> @property <NEW_LINE> def cpp_flags(self): <NEW_LINE> <INDENT> cpp_flags = self.include_flags <NEW_LINE> if self.macro_definitions: <NEW_LINE> <INDENT> cpp_flags += ' ' + self.macro_definitions <NEW_LINE> <DEDENT> return cpp_flags <NEW_LINE> <DEDENT> def add_macro(self, macro): <NEW_LINE> <INDENT> self._macro_definitions.append(macro)
Sequence of absolute paths to headers. Provides a few convenience methods to manipulate header paths and get commonly used compiler flags or names.
6259905a009cb60464d02b04
class LiterateWorkflow(pe.Workflow): <NEW_LINE> <INDENT> def __init__(self, name, base_dir=None): <NEW_LINE> <INDENT> super(LiterateWorkflow, self).__init__(name, base_dir) <NEW_LINE> self.__desc__ = None <NEW_LINE> self.__postdesc__ = None <NEW_LINE> <DEDENT> def visit_desc(self): <NEW_LINE> <INDENT> desc = [] <NEW_LINE> if self.__desc__: <NEW_LINE> <INDENT> desc += [self.__desc__] <NEW_LINE> <DEDENT> for node in pe.utils.topological_sort(self._graph)[0]: <NEW_LINE> <INDENT> if isinstance(node, LiterateWorkflow): <NEW_LINE> <INDENT> add_desc = node.visit_desc() <NEW_LINE> if add_desc not in desc: <NEW_LINE> <INDENT> desc.append(add_desc) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self.__postdesc__: <NEW_LINE> <INDENT> desc += [self.__postdesc__] <NEW_LINE> <DEDENT> return "".join(desc)
Controls the setup and execution of a pipeline of processes.
625990591b99ca400229001f
class NetworkProfile(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'network_interfaces': {'key': 'networkInterfaces', 'type': '[NetworkInterfaceReference]'}, } <NEW_LINE> def __init__( self, *, network_interfaces: Optional[List["NetworkInterfaceReference"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(NetworkProfile, self).__init__(**kwargs) <NEW_LINE> self.network_interfaces = network_interfaces
Specifies the network interfaces of the virtual machine. :ivar network_interfaces: Specifies the list of resource Ids for the network interfaces associated with the virtual machine. :vartype network_interfaces: list[~azure.mgmt.compute.v2019_12_01.models.NetworkInterfaceReference]
6259905a3c8af77a43b68a28
class PStatsLoader( object ): <NEW_LINE> <INDENT> def __init__( self, *filenames ): <NEW_LINE> <INDENT> self.filename = filenames <NEW_LINE> self.rows = {} <NEW_LINE> self.stats = pstats.Stats( *filenames ) <NEW_LINE> self.tree = self.load( self.stats.stats ) <NEW_LINE> self.location_rows = {} <NEW_LINE> self.location_tree = l = self.load_location( ) <NEW_LINE> <DEDENT> def load( self, stats ): <NEW_LINE> <INDENT> rows = self.rows <NEW_LINE> for func, raw in stats.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> rows[func] = row = PStatRow( func,raw ) <NEW_LINE> <DEDENT> except ValueError as err: <NEW_LINE> <INDENT> log.info( 'Null row: %s', func ) <NEW_LINE> <DEDENT> <DEDENT> for row in rows.values(): <NEW_LINE> <INDENT> row.weave( rows ) <NEW_LINE> <DEDENT> return self.find_root( rows ) <NEW_LINE> <DEDENT> def find_root( self, rows ): <NEW_LINE> <INDENT> maxes = sorted( rows.values(), key = lambda x: x.cummulative ) <NEW_LINE> if not maxes: <NEW_LINE> <INDENT> raise RuntimeError( """Null results!""" ) <NEW_LINE> <DEDENT> root = maxes[-1] <NEW_LINE> roots = [root] <NEW_LINE> for key,value in rows.items(): <NEW_LINE> <INDENT> if not value.parents: <NEW_LINE> <INDENT> log.debug( 'Found node root: %s', value ) <NEW_LINE> if value not in roots: <NEW_LINE> <INDENT> roots.append( value ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(roots) > 1: <NEW_LINE> <INDENT> root = PStatGroup( directory='*', filename='*', name=_("<profiling run>"), children= roots, ) <NEW_LINE> root.finalize() <NEW_LINE> self.rows[ root.key ] = root <NEW_LINE> <DEDENT> return root <NEW_LINE> <DEDENT> def load_location( self ): <NEW_LINE> <INDENT> directories = {} <NEW_LINE> files = {} <NEW_LINE> root = PStatLocation( '/', 'PYTHONPATH' ) <NEW_LINE> self.location_rows = self.rows.copy() <NEW_LINE> for child in self.rows.values(): <NEW_LINE> <INDENT> current = directories.get( child.directory ) <NEW_LINE> directory, filename = child.directory, child.filename <NEW_LINE> if current is None: <NEW_LINE> <INDENT> if directory == '': <NEW_LINE> <INDENT> current = root <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current = PStatLocation( directory, '' ) <NEW_LINE> self.location_rows[ current.key ] = current <NEW_LINE> <DEDENT> directories[ directory ] = current <NEW_LINE> <DEDENT> if filename == '~': <NEW_LINE> <INDENT> filename = '<built-in>' <NEW_LINE> <DEDENT> file_current = files.get( (directory,filename) ) <NEW_LINE> if file_current is None: <NEW_LINE> <INDENT> file_current = PStatLocation( directory, filename ) <NEW_LINE> self.location_rows[ file_current.key ] = file_current <NEW_LINE> files[ (directory,filename) ] = file_current <NEW_LINE> current.children.append( file_current ) <NEW_LINE> <DEDENT> file_current.children.append( child ) <NEW_LINE> <DEDENT> for key,value in directories.items(): <NEW_LINE> <INDENT> if value is root: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> found = False <NEW_LINE> while key: <NEW_LINE> <INDENT> new_key,rest = os.path.split( key ) <NEW_LINE> if new_key == key: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> key = new_key <NEW_LINE> parent = directories.get( key ) <NEW_LINE> if parent: <NEW_LINE> <INDENT> if value is not parent: <NEW_LINE> <INDENT> parent.children.append( value ) <NEW_LINE> found = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not found: <NEW_LINE> <INDENT> root.children.append( value ) <NEW_LINE> <DEDENT> <DEDENT> root.finalize() <NEW_LINE> return root
Load profiler statistics from pstats files
6259905ad486a94d0ba2d598
class Group(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=32) <NEW_LINE> menu = models.ForeignKey(to='Menu')
Group table for permissions
6259905a9c8ee82313040c72
class AsyncQueuePublisher(object): <NEW_LINE> <INDENT> implements(IQueuePublisher) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.reconnect() <NEW_LINE> <DEDENT> def reconnect(self): <NEW_LINE> <INDENT> connectionInfo = getUtility(IAMQPConnectionInfo) <NEW_LINE> queueSchema = getUtility(IQueueSchema) <NEW_LINE> self._amqpClient = AMQPFactory(connectionInfo, queueSchema) <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def publish(self, exchange, routing_key, message, createQueues=None, mandatory=False, headers=None, declareExchange=True): <NEW_LINE> <INDENT> if createQueues: <NEW_LINE> <INDENT> for queue in createQueues: <NEW_LINE> <INDENT> yield self._amqpClient.createQueue(queue) <NEW_LINE> <DEDENT> <DEDENT> result = yield self._amqpClient.send(exchange, routing_key, message, mandatory=mandatory, headers=headers, declareExchange=declareExchange) <NEW_LINE> defer.returnValue(result) <NEW_LINE> <DEDENT> @property <NEW_LINE> def channel(self): <NEW_LINE> <INDENT> return self._amqpClient.channel <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> return self._amqpClient.shutdown()
Sends the protobuf to an exchange in a non-blocking manner
6259905a29b78933be26abac
class StandardExit: <NEW_LINE> <INDENT> def __init__(self, exitable, utxo_pos, output_id, exit_target, amount, bond_size): <NEW_LINE> <INDENT> self.owner = exit_target <NEW_LINE> self.amount = amount <NEW_LINE> self.position = utxo_pos <NEW_LINE> self.exitable = exitable <NEW_LINE> self.output_id = output_id <NEW_LINE> self.bond_size = bond_size <NEW_LINE> <DEDENT> def to_list(self): <NEW_LINE> <INDENT> return [self.owner, self.amount, self.position, self.exitable, self.output_id, self.bond_size] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.to_list().__str__() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_list().__repr__() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if hasattr(other, "to_list"): <NEW_LINE> <INDENT> return self.to_list() == other.to_list() <NEW_LINE> <DEDENT> return (self.to_list() == other) or (self.to_list()[:4] == other)
Represents a Plasma exit. Attributes: owner (str): Address of the exit's owner. amount (int): How much value is being exited. position (int): UTXO position. exitable (boolean): whether it will exit when processed. output_id (str): output exit identifier (not exit id). bond_size (int): value of the paid bond.
6259905a7b25080760ed87c7
class Message(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.active = False <NEW_LINE> self.flash = False <NEW_LINE> self.border = False <NEW_LINE> self.anim = 0 <NEW_LINE> self.speed = 3 <NEW_LINE> self.bitmap = None <NEW_LINE> self.text = '' <NEW_LINE> self.font = 'Sans' <NEW_LINE> self.offset = 0 <NEW_LINE> <DEDENT> def genBitmap(self): <NEW_LINE> <INDENT> if self.bitmap is not None: <NEW_LINE> <INDENT> return self.bitmap <NEW_LINE> <DEDENT> font = Qt.QFont() <NEW_LINE> font.fromString(self.font) <NEW_LINE> width = 2 * Qt.QFontMetrics(font).boundingRect(self.text).width() <NEW_LINE> if not width: <NEW_LINE> <INDENT> return Bitmap() <NEW_LINE> <DEDENT> image = Qt.QImage(width, HEIGHT, Qt.QImage.Format_Mono) <NEW_LINE> image.fill(0) <NEW_LINE> with Qt.QPainter(image) as painter: <NEW_LINE> <INDENT> painter.setRenderHints(Qt.QPainter.RenderHints()) <NEW_LINE> painter.setFont(font) <NEW_LINE> painter.setPen(Qt.QPen(Qt.QColor('white'))) <NEW_LINE> real_width = painter.drawText(0, -self.offset, width, HEIGHT + self.offset, Qt.Qt.AlignTop | Qt.Qt.AlignLeft, self.text).width() <NEW_LINE> <DEDENT> return Bitmap(image, real_width)
Represents a single message. It can be in one of two modes: if self.bitmap is None, it is generated on the fly from self.text and associated font settings. Otherwise the bitmap is used directly.
6259905af7d966606f7493a0
class RazerBlackWidowChromaOverwatch(_RippleKeyboard): <NEW_LINE> <INDENT> EVENT_FILE_REGEX = re.compile(r'.*BlackWidow_Chroma(-if01)?-event-kbd') <NEW_LINE> USB_VID = 0x1532 <NEW_LINE> USB_PID = 0x0211 <NEW_LINE> HAS_MATRIX = True <NEW_LINE> DEDICATED_MACRO_KEYS = True <NEW_LINE> MATRIX_DIMS = [6, 22] <NEW_LINE> METHODS = ['get_device_type_keyboard', 'set_wave_effect', 'set_static_effect', 'set_spectrum_effect', 'set_reactive_effect', 'set_none_effect', 'set_breath_random_effect', 'set_breath_single_effect', 'set_breath_dual_effect', 'set_custom_effect', 'set_key_row', 'get_game_mode', 'set_game_mode', 'get_macro_mode', 'set_macro_mode', 'get_macro_effect', 'set_macro_effect', 'get_macros', 'delete_macro', 'add_macro', 'set_ripple_effect', 'set_ripple_effect_random_colour'] <NEW_LINE> DEVICE_IMAGE = "https://assets.razerzone.com/eeimages/products/23326/overwatch-razer-gallery-5.png" <NEW_LINE> RAZER_URLS = { "top_img": "https://assets.razerzone.com/eeimages/products/23326/overwatch-razer-gallery-5.png", "side_img": "https://assets.razerzone.com/eeimages/products/23326/overwatch-razer-gallery-3.png", "perspective_img": "https://assets.razerzone.com/eeimages/products/23326/overwatch-razer-gallery-1.png" }
Class for the Razer BlackWidow Chroma (Overwatch)
6259905a2ae34c7f260ac6b7
class Worker(Person): <NEW_LINE> <INDENT> objects = WorkerQuerySet.as_manager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> proxy = True <NEW_LINE> verbose_name = _('worker') <NEW_LINE> verbose_name_plural = _('workers')
Proxy class to tickle.Person so we can add some Fungus specific methods.
6259905a462c4b4f79dbcfd5
class ModuleTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_waterregulation_system(self): <NEW_LINE> <INDENT> self.sensor = Sensor('127.0.0.1', '8000') <NEW_LINE> self.pump = Pump('127.0.0.1', '8000') <NEW_LINE> self.decider = Decider(100, 0.05) <NEW_LINE> self.controller = Controller(self.sensor, self.pump, self.decider) <NEW_LINE> self.pump.set_state = MagicMock(return_value=True) <NEW_LINE> self.pump.get_state = MagicMock(return_value=True) <NEW_LINE> self.sensor.measure = MagicMock(return_value=True) <NEW_LINE> self.decider.decide = MagicMock(return_value=True) <NEW_LINE> self.controller.tick() <NEW_LINE> self.pump.set_state.assert_called_with(True) <NEW_LINE> self.pump.get_state.assert_called_with() <NEW_LINE> self.sensor.measure.assert_called_with() <NEW_LINE> self.decider.decide.assert_called_with(True, True, self.controller.actions)
Module tests for the water-regulation module
6259905add821e528d6da468
class TransitionError(Error): <NEW_LINE> <INDENT> def __init__(self, previous, next, message): <NEW_LINE> <INDENT> self.previous = previous <NEW_LINE> self.next = next <NEW_LINE> self.message = message
Raised when an operation attempts a state transition that is not allowed. Attributes: previous -- state at beginning of transition next -- attempted new state message -- explanation of why the specific transition is not allowed
6259905a379a373c97d9a5f5
class MBDynAssignLabels(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "import.mdbyn_labels" <NEW_LINE> bl_label = "Import labels of MBDyn objects" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> ret_val = assign_labels(context) <NEW_LINE> if ret_val == {'NOTHING_DONE'}: <NEW_LINE> <INDENT> message = 'MBDyn labels file provided appears to not contain ' + 'correct labels.' <NEW_LINE> self.report({'WARNING'}, message) <NEW_LINE> baseLogger.warning(message) <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> elif ret_val == {'LABELS_UPDATED'}: <NEW_LINE> <INDENT> message = "MBDyn labels imported" <NEW_LINE> self.report({'INFO'}, message) <NEW_LINE> baseLogger.info(message) <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> elif ret_val == {'FILE_NOT_FOUND'}: <NEW_LINE> <INDENT> message = "MBDyn labels file not found..." <NEW_LINE> self.report({'ERROR'}, message) <NEW_LINE> baseLogger.error(message) <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> return self.execute(context)
Assigns 'recognisable' labels to MBDyn nodes and elements by parsing the .log file
6259905a009cb60464d02b06
class TestingConfig(Config): <NEW_LINE> <INDENT> TESTING = True
Testing configuration
6259905a2ae34c7f260ac6b8
class UserProfileAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = [ 'user', ]
Admin for UserProfile model.
6259905a45492302aabfdaa9
class ChatSession(async_chat): <NEW_LINE> <INDENT> def __init__(self, server, sock): <NEW_LINE> <INDENT> async_chat.__init__(self, sock) <NEW_LINE> self.server = server <NEW_LINE> self.set_terminator('\n') <NEW_LINE> self.data = [] <NEW_LINE> self.name = None <NEW_LINE> self.enter(LoginRoom(server)) <NEW_LINE> <DEDENT> def enter(self, room): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cur = self.room <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur.remove(self) <NEW_LINE> <DEDENT> self.room = room <NEW_LINE> room.add(self) <NEW_LINE> <DEDENT> def collect_incoming_data(self, data): <NEW_LINE> <INDENT> self.data.append(data) <NEW_LINE> <DEDENT> def found_terminator(self): <NEW_LINE> <INDENT> line = ''.join(self.data) <NEW_LINE> self.data = [] <NEW_LINE> try: <NEW_LINE> <INDENT> self.room.handle(self, line) <NEW_LINE> <DEDENT> except EndSession: <NEW_LINE> <INDENT> self.handle_close() <NEW_LINE> <DEDENT> <DEDENT> def handle_close(self): <NEW_LINE> <INDENT> async_chat.handle_close(self) <NEW_LINE> self.enter(LogoutRoom(self.server))
responsible for communicating with a single user
6259905a8e71fb1e983bd09b
class CableGraphics: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cable = ParticleCable() <NEW_LINE> self.shape = cylinder(radius = 0.1,color=color.cyan)
Class responsible for rendering cable links --------- properties: rod - ParticleRod shape - vpython.cylinder
6259905a004d5f362081fad6
class glcmCalculator(object): <NEW_LINE> <INDENT> def __init__(self, eeImage, info): <NEW_LINE> <INDENT> self.eeImage = eeImage <NEW_LINE> self.info = info <NEW_LINE> <DEDENT> def calc_glcm(self): <NEW_LINE> <INDENT> img = [] <NEW_LINE> for band in self.info: <NEW_LINE> <INDENT> dummy = self.eeImage.bands_dic[band] <NEW_LINE> if self.info[band] not in [1, None]: <NEW_LINE> <INDENT> dummy = dummy.multiply(self.info[band]) <NEW_LINE> <DEDENT> img.append(dummy) <NEW_LINE> <DEDENT> image = ee.Image(img) <NEW_LINE> image = image.toInt() <NEW_LINE> glcm_image = image.glcmTexture() <NEW_LINE> return glcm_image <NEW_LINE> <DEDENT> def glcm_bands_names(self): <NEW_LINE> <INDENT> glcms = ['asm', 'contrast', 'corr', 'var', 'idm', 'savg', 'svar', 'sent', 'ent', 'dvar', 'dent', 'imcorr1', 'imcorr2', 'maxcorr', 'diss', 'inertia', 'shade', 'prom'] <NEW_LINE> band_names = [] <NEW_LINE> for band in self.info: <NEW_LINE> <INDENT> for glcm in glcms: <NEW_LINE> <INDENT> dummy = '_'.join([band, glcm]) <NEW_LINE> band_names.append(dummy) <NEW_LINE> <DEDENT> <DEDENT> return band_names <NEW_LINE> <DEDENT> def calc(self): <NEW_LINE> <INDENT> image = self.calc_glcm() <NEW_LINE> bands = self.glcm_bands_names() <NEW_LINE> self.eeImage.update(image, bands) <NEW_LINE> return self.eeImage
Computes GLCM texture bands for an Earth Engine image.
6259905a99cbb53fe68324b1
class Agent(Entity): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Agent, self).__init__() <NEW_LINE> self.movable = True <NEW_LINE> self.silent = False <NEW_LINE> self.blind = False <NEW_LINE> self.u_noise = None <NEW_LINE> self.c_noise = None <NEW_LINE> self.u_range = 1.0 <NEW_LINE> self.state = AgentState() <NEW_LINE> self.action = Action() <NEW_LINE> self.action_callback = None <NEW_LINE> self.replay_buffer = None <NEW_LINE> <DEDENT> def bind_callback(self, func_name, func_entity): <NEW_LINE> <INDENT> assert getattr(func_name, None) is None, "Repeated function registion" <NEW_LINE> setattr(self, func_name, func_entity) <NEW_LINE> <DEDENT> def callback(self, func_name, arg_list): <NEW_LINE> <INDENT> assert getattr(self, func_name, None) is not None, "{} does not exist, pls check you've registed it".format(func_name) <NEW_LINE> assert arg_list is None or isinstance(arg_list, tuple or list), "arg_list can be None or tuple or list" <NEW_LINE> return getattr(self, func_name)(*arg_list)
Properties of agent entities
6259905a0a50d4780f7068a7