Columns: code (string, 4 to 4.48k chars), docstring (string, 1 to 6.45k chars), _id (string, 24 chars)
class DynamicOutputArgumentsHelper(interface.ArgumentsHelper): <NEW_LINE> <INDENT> NAME = 'dynamic' <NEW_LINE> CATEGORY = 'output' <NEW_LINE> DESCRIPTION = 'Argument helper for the dynamic output module.' <NEW_LINE> _DEFAULT_FIELDS = [ 'datetime', 'timestamp_desc', 'source', 'source_long', 'message', 'parser', 'display_name', 'tag'] <NEW_LINE> @classmethod <NEW_LINE> def AddArguments(cls, argument_group): <NEW_LINE> <INDENT> default_fields = ','.join(cls._DEFAULT_FIELDS) <NEW_LINE> argument_group.add_argument( '--fields', dest='fields', type=str, action='store', default=default_fields, help=( 'Defines which fields should be included in the output.')) <NEW_LINE> default_fields = ', '.join(cls._DEFAULT_FIELDS) <NEW_LINE> argument_group.add_argument( '--additional_fields', '--additional-fields', dest='additional_fields', type=str, action='store', default='', help=( 'Defines extra fields to be included in the output, in addition to ' 'the default fields, which are {0:s}.'.format(default_fields))) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ParseOptions(cls, options, output_module): <NEW_LINE> <INDENT> if not isinstance(output_module, dynamic.DynamicOutputModule): <NEW_LINE> <INDENT> raise errors.BadConfigObject( 'Output module is not an instance of DynamicOutputModule') <NEW_LINE> <DEDENT> default_fields = ','.join(cls._DEFAULT_FIELDS) <NEW_LINE> fields = cls._ParseStringOption( options, 'fields', default_value=default_fields) <NEW_LINE> additional_fields = cls._ParseStringOption(options, 'additional_fields') <NEW_LINE> if additional_fields: <NEW_LINE> <INDENT> fields = ','.join([fields, additional_fields]) <NEW_LINE> <DEDENT> output_module.SetFields([ field_name.strip() for field_name in fields.split(',')])
Dynamic output module CLI arguments helper.
6259907192d797404e3897bc
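A minimal standalone sketch of the option pattern the helper above registers, using only the standard argparse module (plaso itself is not imported; the option names and defaults mirror the record above):

import argparse

DEFAULT_FIELDS = ['datetime', 'timestamp_desc', 'source', 'source_long',
                  'message', 'parser', 'display_name', 'tag']

parser = argparse.ArgumentParser()
parser.add_argument(
    '--fields', dest='fields', type=str, action='store',
    default=','.join(DEFAULT_FIELDS),
    help='Defines which fields should be included in the output.')
parser.add_argument(
    '--additional_fields', '--additional-fields', dest='additional_fields',
    type=str, action='store', default='',
    help='Extra fields appended to the default fields.')

options = parser.parse_args(['--additional_fields', 'username,hostname'])
fields = options.fields
if options.additional_fields:
    fields = ','.join([fields, options.additional_fields])
# The output module would then receive the stripped field names:
print([name.strip() for name in fields.split(',')])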
class format_choice(wizard.interface): <NEW_LINE> <INDENT> def _select_format(self, cr, uid, data, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> wiz_name = self.wiz_name.replace('jasper.', '') <NEW_LINE> pool = pooler.get_pool(cr.dbname) <NEW_LINE> document_obj = pool.get('jasper.document') <NEW_LINE> doc_ids = document_obj.search(cr, uid, [('service', '=', wiz_name)]) <NEW_LINE> if not doc_ids: <NEW_LINE> <INDENT> raise wizard.except_wizard(_('Error'), _('No report found!')) <NEW_LINE> <DEDENT> document = document_obj.browse(cr, uid, doc_ids[0], context=context) <NEW_LINE> if document.id: <NEW_LINE> <INDENT> if document.format_choice == 'mono': <NEW_LINE> <INDENT> action = 'create_wizard' <NEW_LINE> <DEDENT> elif document.format_choice == 'multi': <NEW_LINE> <INDENT> action = 'format_choice' <NEW_LINE> raise wizard.except_wizard(_('Error'), _('No implemented yet!')) <NEW_LINE> <DEDENT> <DEDENT> return action <NEW_LINE> <DEDENT> def _create_wizard(self, cr, uid, data, context=None): <NEW_LINE> <INDENT> wiz_name = self.wiz_name.replace('jasper.', '') <NEW_LINE> pool = pooler.get_pool(cr.dbname) <NEW_LINE> document_obj = pool.get('jasper.document') <NEW_LINE> doc_ids = document_obj.search(cr, uid, [('service', '=', wiz_name)]) <NEW_LINE> if not doc_ids: <NEW_LINE> <INDENT> raise wizard.except_wizard(_('Error'), _('No report found!')) <NEW_LINE> <DEDENT> document = document_obj.browse(cr, uid, doc_ids[0], context=context) <NEW_LINE> option = { 'id': document.id, 'attachment': document.attachment, 'attachment_use': document.attachment_use, } <NEW_LINE> uri = '/openerp/bases/%s/%s' % (cr.dbname, document.report_unit) <NEW_LINE> data['form']['params'] = (document.format, uri, document.mode, document.depth, option) <NEW_LINE> data['form']['ids'] = data['ids'] <NEW_LINE> return data['form'] <NEW_LINE> <DEDENT> states = { 'init': { 'actions': [], 'result': {'type': 'choice', 'next_state': _select_format} }, 'format_choice': { 'actions': [], 'result': { 'type': 'form', 'arch': form, 'fields': fields, 'state': (('end', 'Cancel', 'gtk-cancel'), ('create_wizard', 'OK', 'gtk-ok', True)) } }, 'create_wizard': { 'actions': [_create_wizard], 'result': { 'type': 'print', 'report': 'print.jasper.server', 'state': 'end' } }, }
If format = multi, compose a wizard that asks for the document's extension; if format = mono, just launch the report and return the previously defined format.
62599071435de62698e9d6c8
class CNN100(nn.Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(CNN100, self).__init__() <NEW_LINE> self.conv1 = nn.Conv2d(in_channels=1, out_channels=32, kernel_size=5, padding=(2, 2)) <NEW_LINE> self.conv2 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, padding=(1, 1)) <NEW_LINE> self.conv3 = nn.Conv2d(in_channels=64, out_channels=96, kernel_size=3, padding=(1, 1)) <NEW_LINE> self.pool5 = nn.MaxPool2d(kernel_size=5) <NEW_LINE> self.pool2 = nn.MaxPool2d(kernel_size=2) <NEW_LINE> self.activation = nn.ReLU() <NEW_LINE> self.dropout = nn.Dropout(0.5) <NEW_LINE> self.fc1 = nn.Linear(in_features=96*2*2, out_features=64) <NEW_LINE> self.fc2 = nn.Linear(in_features=64, out_features=2) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.activation(self.conv1(x)) <NEW_LINE> x = self.pool5(x) <NEW_LINE> x = self.activation(self.conv2(x)) <NEW_LINE> x = self.pool2(x) <NEW_LINE> x = self.activation(self.conv3(x)) <NEW_LINE> x = self.pool2(x) <NEW_LINE> x = x.view(x.shape[0], -1) <NEW_LINE> x = self.activation(self.fc1(x)) <NEW_LINE> x = self.dropout(x) <NEW_LINE> x = self.fc2(x) <NEW_LINE> return x
A larger CNN model
625990718a43f66fc4bf3a57
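A hedged smoke test for the model above, assuming PyTorch is installed and the CNN100 class from the record is in scope; the fc1 layer (96*2*2 inputs) implies a single-channel 40x40 input, since 40 divided by the 5x5 pooling and the two 2x2 poolings gives 2:

import torch

model = CNN100()   # assumes the class defined in the record above
model.eval()
with torch.no_grad():
    batch = torch.randn(4, 1, 40, 40)   # (batch, channels, height, width)
    logits = model(batch)
print(logits.shape)   # expected: torch.Size([4, 2])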
class IsAdminOrOwnerReadOnly(BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> return bool( (request.method in SAFE_METHODS and request.user == obj.user) or request.user and request.user.is_authenticated and request.user.is_staff )
Custom access permissions (any method for the admin, read-only access for the owner).
625990714c3428357761bb77
class ValuesPointer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.values = Values() <NEW_LINE> <DEDENT> def swap(self, new_values): <NEW_LINE> <INDENT> assert isinstance(new_values, Values) <NEW_LINE> old = self.values <NEW_LINE> self.values = new_values <NEW_LINE> return old
A ValuesPointer points to a Values instance. This indirection is needed so that several Settings instances (with potentially different resolvers / scopes) can share the same ValuesPointer: when one instance's values are updated, all of the other instances see that update.
62599071cc0a2c111447c732
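A self-contained sketch of the indirection the docstring describes; a plain dict stands in for the real Values class, which is not imported here:

class ValuesPointerSketch:
    """Simplified stand-in: holds one shared reference that can be swapped."""
    def __init__(self):
        self.values = {}              # stand-in for a Values instance
    def swap(self, new_values):
        old, self.values = self.values, new_values
        return old

# Two Settings-like owners hold the same pointer, so one swap is visible to both.
pointer = ValuesPointerSketch()
settings_a = {'pointer': pointer}
settings_b = {'pointer': pointer}
pointer.swap({'timeout': 30})
print(settings_a['pointer'].values, settings_b['pointer'].values)
# {'timeout': 30} {'timeout': 30}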
class BaseExecutionEngine(BaseEngine): <NEW_LINE> <INDENT> def evaluate(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_expression_engine(self, name): <NEW_LINE> <INDENT> return self.manager.get_expression_engine(name) <NEW_LINE> <DEDENT> def get_output_schema(self, process): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def discover_process(self, path): <NEW_LINE> <INDENT> return []
A workflow execution engine.
625990713d592f4c4edbc7a4
class BinaryHierarchicalSoftmax(link.Link): <NEW_LINE> <INDENT> def __init__(self, in_size, tree): <NEW_LINE> <INDENT> self._func = BinaryHierarchicalSoftmaxFunction(tree) <NEW_LINE> super(BinaryHierarchicalSoftmax, self).__init__( W=(self._func.parser_size, in_size)) <NEW_LINE> self.W.data[...] = numpy.random.uniform(-1, 1, self.W.data.shape) <NEW_LINE> <DEDENT> def to_gpu(self, device=None): <NEW_LINE> <INDENT> with cuda.get_device(device): <NEW_LINE> <INDENT> super(BinaryHierarchicalSoftmax, self).to_gpu(device) <NEW_LINE> self._func.to_gpu(device) <NEW_LINE> <DEDENT> <DEDENT> def to_cpu(self): <NEW_LINE> <INDENT> super(BinaryHierarchicalSoftmax, self).to_cpu() <NEW_LINE> self._func.to_cpu() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create_huffman_tree(word_counts): <NEW_LINE> <INDENT> if len(word_counts) == 0: <NEW_LINE> <INDENT> raise ValueError('Empty vocabulary') <NEW_LINE> <DEDENT> q = six.moves.queue.PriorityQueue() <NEW_LINE> for uid, (w, c) in enumerate(six.iteritems(word_counts)): <NEW_LINE> <INDENT> q.put((c, uid, w)) <NEW_LINE> <DEDENT> while q.qsize() >= 2: <NEW_LINE> <INDENT> (count1, id1, word1) = q.get() <NEW_LINE> (count2, id2, word2) = q.get() <NEW_LINE> count = count1 + count2 <NEW_LINE> tree = (word1, word2) <NEW_LINE> q.put((count, min(id1, id2), tree)) <NEW_LINE> <DEDENT> return q.get()[2] <NEW_LINE> <DEDENT> def __call__(self, x, t): <NEW_LINE> <INDENT> f = copy.copy(self._func) <NEW_LINE> return f(x, t, self.W)
Hierarchical softmax layer over a binary tree. In natural language applications, the vocabulary size is too large to use a plain softmax loss. Instead, the hierarchical softmax uses a product of sigmoid functions. It costs only :math:`O(\log(n))` time on average, where :math:`n` is the vocabulary size. First, a user needs to prepare a binary tree in which each leaf corresponds to a word in the vocabulary. When a word :math:`x` is given, exactly one path from the root of the tree to the leaf of that word exists. Let :math:`\mbox{path}(x) = ((e_1, b_1), \dots, (e_m, b_m))` be the path of :math:`x`, where :math:`e_i` is the index of the :math:`i`-th internal node, and :math:`b_i \in \{-1, 1\}` indicates the direction to move at the :math:`i`-th internal node (-1 is left, and 1 is right). Then, the probability of :math:`x` is given as below: .. math:: P(x) &= \prod_{(e_i, b_i) \in \mbox{path}(x)}P(b_i | e_i) \\ &= \prod_{(e_i, b_i) \in \mbox{path}(x)}\sigma(b_i x^\top w_{e_i}), where :math:`\sigma(\cdot)` is the sigmoid function, and :math:`w` is a weight matrix. This function costs :math:`O(\log(n))` time, as the average length of a path is :math:`O(\log(n))`, and :math:`O(n)` memory, as the number of internal nodes equals :math:`n - 1`. Args: in_size (int): Dimension of input vectors. tree: A binary tree made with tuples like `((1, 2), 3)`. Attributes: W (~chainer.Variable): Weight parameter matrix. See: Hierarchical Probabilistic Neural Network Language Model [Morin+, AISTAT2005].
625990712ae34c7f260ac9ad
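A standalone re-implementation of the create_huffman_tree logic from the record above, showing the nested-tuple tree format the constructor expects; chainer is not imported, and queue is what six.moves.queue resolves to on Python 3:

import queue

def create_huffman_tree(word_counts):
    """Build a Huffman tree of nested tuples, mirroring the classmethod above."""
    if not word_counts:
        raise ValueError('Empty vocabulary')
    q = queue.PriorityQueue()
    for uid, (word, count) in enumerate(word_counts.items()):
        q.put((count, uid, word))
    while q.qsize() >= 2:
        count1, id1, left = q.get()
        count2, id2, right = q.get()
        q.put((count1 + count2, min(id1, id2), (left, right)))
    return q.get()[2]

print(create_huffman_tree({'apple': 5, 'pen': 2, 'pineapple': 3}))
# ('apple', ('pen', 'pineapple'))  -- rarer words sit deeper in the tree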
class Serializable(object): <NEW_LINE> <INDENT> REQUIRED = dict() <NEW_LINE> OPTIONAL = dict() <NEW_LINE> def __init__(self, **fields): <NEW_LINE> <INDENT> self._fields = {} <NEW_LINE> for field, value in fields.items(): <NEW_LINE> <INDENT> self._validate(field, value) <NEW_LINE> self._fields[field] = value <NEW_LINE> <DEDENT> missing_fields = set(self.REQUIRED) - set(self._fields) <NEW_LINE> if missing_fields: <NEW_LINE> <INDENT> raise exceptions.DeserializeError.missing_fields( missing_fields) <NEW_LINE> <DEDENT> for field in set(self.OPTIONAL) - set(self._fields): <NEW_LINE> <INDENT> self._fields[field] = self.OPTIONAL[field].default <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def deserialize(cls, fields): <NEW_LINE> <INDENT> return cls(**fields) <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> json = {} <NEW_LINE> for field, value in self._fields.items(): <NEW_LINE> <INDENT> if value != self._field_type(field).default or field in self.REQUIRED: <NEW_LINE> <INDENT> json[field] = value <NEW_LINE> <DEDENT> <DEDENT> return json <NEW_LINE> <DEDENT> def __getitem__(self, field): <NEW_LINE> <INDENT> if field not in self._fields: <NEW_LINE> <INDENT> raise KeyError(field) <NEW_LINE> <DEDENT> return self._fields[field] <NEW_LINE> <DEDENT> def __setitem__(self, field, value): <NEW_LINE> <INDENT> self._validate(field, value) <NEW_LINE> self._fields[field] = value <NEW_LINE> <DEDENT> def _field_type(self, field): <NEW_LINE> <INDENT> if field in self.REQUIRED: <NEW_LINE> <INDENT> return self.REQUIRED[field] <NEW_LINE> <DEDENT> elif field in self.OPTIONAL: <NEW_LINE> <INDENT> return self.OPTIONAL[field] <NEW_LINE> <DEDENT> raise AttributeError('{} has no field {}'.format( self.__class__.__name__, field)) <NEW_LINE> <DEDENT> def _validate(self, field, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> field_type = self._field_type(field) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise exceptions.DeserializeError.unexpected_field(field) <NEW_LINE> <DEDENT> if not field_type.validate(value): <NEW_LINE> <INDENT> raise exceptions.DeserializeError.unexpected_type( field, field_type, value)
An abstract class for serializable objects.
62599071f548e778e596ce50
class Card(object): <NEW_LINE> <INDENT> RANKS = ['A', '2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K'] <NEW_LINE> SUITS = ['c', 'd', 'h', 's'] <NEW_LINE> def __init__(self, rank, suit): <NEW_LINE> <INDENT> self.rank = rank <NEW_LINE> self.suit = suit <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> rep = self.rank + self.suit <NEW_LINE> return rep
Playing cards.
6259907156ac1b37e6303944
class Pants: <NEW_LINE> <INDENT> def __init__(self, color, waist_size, length, price): <NEW_LINE> <INDENT> self.color = color <NEW_LINE> self.waist_size = waist_size <NEW_LINE> self.length = length <NEW_LINE> self.price = price <NEW_LINE> <DEDENT> def change_price(self, new_price): <NEW_LINE> <INDENT> self.price = new_price <NEW_LINE> <DEDENT> def discount(self, percentage): <NEW_LINE> <INDENT> return self.price * (1 - percentage)
The Pants class represents an article of clothing sold in a store
6259907191f36d47f2231af0
class Solution: <NEW_LINE> <INDENT> def maxValue(self, grid: List[List[int]]) -> int: <NEW_LINE> <INDENT> dp = [[0 for _ in range(len(grid[0]))] for _ in range(len(grid))] <NEW_LINE> temp = 0 <NEW_LINE> for i in range(len(grid)): <NEW_LINE> <INDENT> temp += grid[i][0] <NEW_LINE> dp[i][0] = temp <NEW_LINE> <DEDENT> temp = grid[0][0] <NEW_LINE> for i in range(1, len(grid[0])): <NEW_LINE> <INDENT> temp += grid[0][i] <NEW_LINE> dp[0][i] = temp <NEW_LINE> <DEDENT> for i in range(1, len(grid)): <NEW_LINE> <INDENT> for j in range(1, len(grid[0])): <NEW_LINE> <INDENT> dp[i][j] = grid[i][j] + max(dp[i-1][j], dp[i][j-1]) <NEW_LINE> <DEDENT> <DEDENT> return dp[-1][-1]
A gift with a certain value (greater than 0) is placed in every cell of an m*n board. Starting from the top-left corner of the board, you collect the gift in the current cell and move one cell right or down at a time, until you reach the bottom-right corner. Given a board and the values of the gifts on it, compute the maximum total value of gifts you can collect.
6259907197e22403b383c7c7
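The docstring above describes the classic gift-grid problem; a self-contained copy of the same dynamic program with a small worked example:

def max_value(grid):
    """Maximum gift value collectable moving only right or down."""
    rows, cols = len(grid), len(grid[0])
    dp = [[0] * cols for _ in range(rows)]
    for i in range(rows):
        for j in range(cols):
            best_prev = max(dp[i - 1][j] if i else 0, dp[i][j - 1] if j else 0)
            dp[i][j] = grid[i][j] + best_prev
    return dp[-1][-1]

# The best path 1 -> 3 -> 5 -> 2 -> 1 collects 12.
print(max_value([[1, 3, 1],
                 [1, 5, 1],
                 [4, 2, 1]]))   # 12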
class StartDateTransformer(FilteringTransformerMixin, BlockStructureTransformer): <NEW_LINE> <INDENT> VERSION = 1 <NEW_LINE> MERGED_START_DATE = 'merged_start_date' <NEW_LINE> @classmethod <NEW_LINE> def name(cls): <NEW_LINE> <INDENT> return "start_date" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _get_merged_start_date(cls, block_structure, block_key): <NEW_LINE> <INDENT> return block_structure.get_transformer_block_field( block_key, cls, cls.MERGED_START_DATE, False ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def collect(cls, block_structure): <NEW_LINE> <INDENT> block_structure.request_xblock_fields('days_early_for_beta') <NEW_LINE> collect_merged_date_field( block_structure, transformer=cls, xblock_field_name='start', merged_field_name=cls.MERGED_START_DATE, default_date=DEFAULT_START_DATE, func_merge_parents=min, func_merge_ancestors=max, ) <NEW_LINE> <DEDENT> def transform_block_filters(self, usage_info, block_structure): <NEW_LINE> <INDENT> if usage_info.has_staff_access: <NEW_LINE> <INDENT> return [block_structure.create_universal_filter()] <NEW_LINE> <DEDENT> removal_condition = lambda block_key: not check_start_date( usage_info.user, block_structure.get_xblock_field(block_key, 'days_early_for_beta'), self._get_merged_start_date(block_structure, block_key), usage_info.course_key, ) <NEW_LINE> return [block_structure.create_removal_filter(removal_condition)]
A transformer that enforces the 'start' and 'days_early_for_beta' fields on blocks by removing blocks from the block structure for which the user does not have access. The 'start' field on a block is percolated down to its descendants, so that all blocks enforce the 'start' field from their ancestors. The assumed 'start' value for a block is then the maximum of its parent and its own. For a block with multiple parents, the assumed parent start date value is a computed minimum of the start dates of all its parents. So as long as one parent chain allows access, the block has access. Staff users are exempted from visibility rules.
6259907167a9b606de547705
class PlantaList(CoreMixinLoginRequired, TemplateView): <NEW_LINE> <INDENT> template_name = 'planta_list.html'
View for rendering the list.
625990714a966d76dd5f07ae
class IBANFormField(CharValidator): <NEW_LINE> <INDENT> def __init__(self, use_nordea_extensions=False, include_countries=None, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.setdefault('min_length', IBAN_MIN_LENGTH) <NEW_LINE> kwargs.setdefault('max_length', 34) <NEW_LINE> self.default_validators = [IBANValidator(use_nordea_extensions, include_countries)] <NEW_LINE> super(IBANFormField, self).__init__(*args, **kwargs)
An IBAN consists of up to 34 alphanumeric characters. To limit validation to specific countries, set the 'include_countries' argument with a tuple or list of ISO 3166-1 alpha-2 codes. For example, `include_countries=('NL', 'BE', 'LU')`. A list of countries that use IBANs as part of SEPA is included for convenience. To use this feature, set `include_countries=IBAN_SEPA_COUNTRIES` as an argument to the field. In addition to validating official IBANs, this field can optionally validate unofficial IBANs that have been catalogued by Nordea by setting the `use_nordea_extensions` argument to True. https://en.wikipedia.org/wiki/International_Bank_Account_Number .. versionadded:: 1.1
6259907163b5f9789fe86a27
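The field delegates the real work to IBANValidator; a self-contained sketch of the mod-97 checksum that IBAN validation is built on (django-localflavor is not imported, and the sample number is the well-known example IBAN from the specification):

def iban_checksum_ok(iban):
    """Return True if the IBAN passes the ISO 13616 mod-97 check."""
    iban = iban.replace(' ', '').upper()
    rearranged = iban[4:] + iban[:4]          # move country code and check digits to the end
    digits = ''.join(str(int(ch, 36)) for ch in rearranged)   # A=10 ... Z=35
    return int(digits) % 97 == 1

print(iban_checksum_ok('GB82 WEST 1234 5698 7654 32'))   # True
print(iban_checksum_ok('GB82 WEST 1234 5698 7654 33'))   # False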
class terminateSession_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'key', None, None, ), ) <NEW_LINE> def __init__(self, key=None,): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.key = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('terminateSession_args') <NEW_LINE> if self.key is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('key', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.key) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.key) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - key
6259907138b623060ffaa4b6
class Delivery(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=200, verbose_name='Заголовок') <NEW_LINE> text = models.TextField(verbose_name='Описание') <NEW_LINE> is_active = models.BooleanField(default=True, verbose_name='Модерация') <NEW_LINE> created = models.DateTimeField(auto_now_add=True, verbose_name='Создан') <NEW_LINE> update = models.DateTimeField(auto_now=True, verbose_name='Обновлен') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Доставка' <NEW_LINE> verbose_name_plural = 'Доставка' <NEW_LINE> ordering = ['-created'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title
Information about the delivery of goods.
62599071e1aae11d1e7cf46f
class GenreViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Genre.objects.all() <NEW_LINE> serializer_class = GenreSerializer
The Genre View provides the `list`, `create`, and `retrieve` actions. Please click on a specific Genre's url for the `update` and `destroy` actions.
6259907123849d37ff85297b
class SrMiseLogError(SrMiseError): <NEW_LINE> <INDENT> def __init__(self, info): <NEW_LINE> <INDENT> SrMiseError.__init__(self, info)
diffpy.srmise exception class. Error while handling logging capabilities.
6259907121bff66bcd72452c
class HiddenMultiField(wtforms.fields.TextField): <NEW_LINE> <INDENT> widget = HiddenMultiInput() <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.separator = kwargs.pop('separator', ',') <NEW_LINE> super(HiddenMultiField, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def _value(self): <NEW_LINE> <INDENT> if self.data: <NEW_LINE> <INDENT> return self.separator.join(self.data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def process_formdata(self, valuelist): <NEW_LINE> <INDENT> super(HiddenMultiField, self).process_formdata(valuelist) <NEW_LINE> if not self.data: <NEW_LINE> <INDENT> self.data = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data = self.data.split(self.separator)
A hidden field that stores multiple comma-separated values, meant to be used as an Ajax widget target. The optional ``separator`` parameter can be used to specify an alternate separator character (default ``','``).
62599071be8e80087fbc0954
class WipViolationViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> permission_classes = (permissions.IsAuthenticated,) <NEW_LINE> serializer_class = WipViolationSerializer <NEW_LINE> queryset = WipViolation.objects.all() <NEW_LINE> def list(self, request): <NEW_LINE> <INDENT> if request.QUERY_PARAMS.get('cardId'): <NEW_LINE> <INDENT> moveQS = WipViolation.objects.filter(card=request.QUERY_PARAMS['cardId']) <NEW_LINE> serializer = self.serializer_class(moveQS, many=True) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> queryset = WipViolation.objects.all() <NEW_LINE> serializer = self.serializer_class(queryset, many=True) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK)
API endpoint that allows WIP violations to be viewed or edited.
625990714f6381625f19a10b
class MountEntry(object): <NEW_LINE> <INDENT> __slots__ = ( 'source', 'target', 'fs_type', 'mnt_opts', 'mount_id', 'parent_id' ) <NEW_LINE> def __init__(self, source, target, fs_type, mnt_opts, mount_id, parent_id): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> self.target = target <NEW_LINE> self.fs_type = fs_type <NEW_LINE> self.mnt_opts = mnt_opts <NEW_LINE> self.mount_id = int(mount_id) <NEW_LINE> self.parent_id = int(parent_id) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( '{name}(source={src!r}, target={target!r}, ' 'fs_type={fs_type!r}, mnt_opts={mnt_opts!r})' ).format( name=self.__class__.__name__, src=self.source, target=self.target, fs_type=self.fs_type, mnt_opts=self.mnt_opts ) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.target < other.target <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> res = ( (self.mount_id == other.mount_id) and (self.parent_id == other.parent_id) and (self.source == other.source) and (self.target == other.target) and (self.fs_type == other.fs_type) and (self.mnt_opts == other.mnt_opts) ) <NEW_LINE> return res <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def mount_entry_parse(cls, mount_entry_line): <NEW_LINE> <INDENT> mount_entry_line = mount_entry_line.strip().split(' ') <NEW_LINE> ( mount_id, parent_id, _major_minor, _parent_path, target, mnt_opts ), data = mount_entry_line[:6], mount_entry_line[6:] <NEW_LINE> fields = [] <NEW_LINE> while data[0] != '-': <NEW_LINE> <INDENT> fields.append(data.pop(0)) <NEW_LINE> <DEDENT> ( _, fs_type, source, mnt_opts2 ) = data <NEW_LINE> mnt_opts = set(mnt_opts.split(',') + mnt_opts2.split(',')) <NEW_LINE> return cls(source, target, fs_type, mnt_opts, mount_id, parent_id)
Mount table entry data.
62599071a219f33f346c80cf
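mount_entry_parse consumes one line of /proc/<pid>/mountinfo; a standalone sketch of the same field layout, using the example line from the proc(5) man page (the MountEntry class above is not imported):

def parse_mountinfo_line(line):
    """Split one mountinfo line into the fields MountEntry.mount_entry_parse uses."""
    parts = line.strip().split(' ')
    mount_id, parent_id, _majmin, _root, target, mnt_opts = parts[:6]
    rest = parts[6:]
    optional = []                    # optional fields run until a lone '-'
    while rest[0] != '-':
        optional.append(rest.pop(0))
    _sep, fs_type, source, super_opts = rest
    opts = set(mnt_opts.split(',') + super_opts.split(','))
    return int(mount_id), int(parent_id), source, target, fs_type, opts

line = '36 35 98:0 /mnt1 /mnt2 rw,noatime master:1 - ext3 /dev/root rw,errors=continue'
print(parse_mountinfo_line(line))
# (36, 35, '/dev/root', '/mnt2', 'ext3', {'rw', 'noatime', 'errors=continue'})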
class NewFolder(NewItem): <NEW_LINE> <INDENT> name = eg.text.MainFrame.Menu.AddFolder.replace("&", "") <NEW_LINE> @eg.AssertInMainThread <NEW_LINE> @eg.LogIt <NEW_LINE> def Do(self, selection): <NEW_LINE> <INDENT> document = self.document <NEW_LINE> def ProcessInActionThread(): <NEW_LINE> <INDENT> if isinstance( selection, (document.MacroItem, document.AutostartItem) ): <NEW_LINE> <INDENT> parent = selection.parent <NEW_LINE> pos = parent.childs.index(selection) + 1 <NEW_LINE> if pos >= len(parent.childs): <NEW_LINE> <INDENT> pos = -1 <NEW_LINE> <DEDENT> <DEDENT> elif isinstance( selection, (document.ActionItem, document.EventItem, document.PluginItem) ): <NEW_LINE> <INDENT> parent = selection.parent.parent <NEW_LINE> pos = parent.childs.index(selection.parent) + 1 <NEW_LINE> if pos >= len(parent.childs): <NEW_LINE> <INDENT> pos = -1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> parent = selection <NEW_LINE> pos = -1 <NEW_LINE> <DEDENT> return document.FolderItem.Create( parent, pos, name=eg.text.General.unnamedFolder ) <NEW_LINE> <DEDENT> item = eg.actionThread.Func(ProcessInActionThread)() <NEW_LINE> self.StoreItem(item) <NEW_LINE> item.Select() <NEW_LINE> return item
Create a new FolderItem if the user has chosen to do so from the menu or toolbar.
6259907155399d3f05627ddd
class ChatSessionMessage(TrackableDateModel): <NEW_LINE> <INDENT> user = models.ForeignKey(User, on_delete=models.PROTECT) <NEW_LINE> chat_session = models.ForeignKey( ChatSession, related_name='messages', on_delete=models.PROTECT ) <NEW_LINE> message = models.TextField(max_length=2000) <NEW_LINE> objects = models.Manager() <NEW_LINE> def to_json(self): <NEW_LINE> <INDENT> return {'user': deserialize_user(self.user), 'message': self.message}
Store messages for a session.
625990714428ac0f6e659df9
class VoiceFilterInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.BizId = None <NEW_LINE> self.FileId = None <NEW_LINE> self.FileName = None <NEW_LINE> self.OpenId = None <NEW_LINE> self.Timestamp = None <NEW_LINE> self.Data = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.BizId = params.get("BizId") <NEW_LINE> self.FileId = params.get("FileId") <NEW_LINE> self.FileName = params.get("FileName") <NEW_LINE> self.OpenId = params.get("OpenId") <NEW_LINE> self.Timestamp = params.get("Timestamp") <NEW_LINE> if params.get("Data") is not None: <NEW_LINE> <INDENT> self.Data = [] <NEW_LINE> for item in params.get("Data"): <NEW_LINE> <INDENT> obj = VoiceFilter() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Data.append(obj)
Voice file filtering details.
625990711f037a2d8b9e54cd
class ColumnAggregator(experimenter.Experimenter): <NEW_LINE> <INDENT> def __init__(self, index, settings=None): <NEW_LINE> <INDENT> super(ColumnAggregator, self).__init__(index, None) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if isinstance(self._index, list): <NEW_LINE> <INDENT> if isinstance(self._index[0], pandas.DataFrame): <NEW_LINE> <INDENT> self._index = [ColumnAggregator(x).run() for x in self._index] <NEW_LINE> <DEDENT> self._index = pandas.DataFrame(dict([(i, x) for i, x in enumerate(self._index)])) <NEW_LINE> <DEDENT> return self._index.select(lambda x: x != u'all', axis=1).sum(axis=1, skipna=True)
Experiment that aggregates data from all columns of a :class:`DataFrame`, a list of :class:`DataFrame` objects, or a list of :class:`Series`, into a single :class:`Series`. Aggregation is done through addition. If a :class:`DataFrame` has a column with the name :obj:`u'all'`, it will *not* be included in the aggregation.
625990717b25080760ed8946
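A self-contained pandas sketch of the aggregation behaviour described above: columns are summed row-wise and a column literally named 'all' is excluded (the record's .select(...) call is an older pandas spelling of the same filter):

import pandas as pd

df = pd.DataFrame({'soprano': [1, 0, 2],
                   'alto':    [0, 3, 1],
                   'all':     [99, 99, 99]})   # the 'all' column is not aggregated

aggregate = df.loc[:, df.columns != 'all'].sum(axis=1, skipna=True)
print(aggregate.tolist())   # [1, 3, 3]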
class TerraformLexer(RegexLexer): <NEW_LINE> <INDENT> name = 'Terraform' <NEW_LINE> aliases = ['terraform', 'tf'] <NEW_LINE> filenames = ['*.tf'] <NEW_LINE> mimetypes = ['application/x-tf', 'application/x-terraform'] <NEW_LINE> embedded_keywords = ('ingress', 'egress', 'listener', 'default', 'connection', 'alias', 'terraform', 'tags', 'vars', 'config', 'lifecycle', 'timeouts') <NEW_LINE> tokens = { 'root': [ include('string'), include('punctuation'), include('curly'), include('basic'), include('whitespace'), (r'[0-9]+', Number), ], 'basic': [ (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), (r'\s*/\*', Comment.Multiline, 'comment'), (r'\s*#.*\n', Comment.Single), (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), (words(('variable', 'resource', 'provider', 'provisioner', 'module', 'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'), (words(embedded_keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration), (r'\$\{', String.Interpol, 'var_builtin'), ], 'function': [ (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), include('punctuation'), include('curly'), ], 'var_builtin': [ (r'\$\{', String.Interpol, '#push'), (words(('concat', 'file', 'join', 'lookup', 'element'), prefix=r'\b', suffix=r'\b'), Name.Builtin), include('string'), include('punctuation'), (r'\s+', Text), (r'\}', String.Interpol, '#pop'), ], 'string': [ (r'(".*")', bygroups(String.Double)), ], 'punctuation': [ (r'[\[\](),.]', Punctuation), ], 'curly': [ (r'\{', Text.Punctuation), (r'\}', Text.Punctuation), ], 'comment': [ (r'[^*/]', Comment.Multiline), (r'/\*', Comment.Multiline, '#push'), (r'\*/', Comment.Multiline, '#pop'), (r'[*/]', Comment.Multiline) ], 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), ], }
Lexer for `Terraform .tf files <https://www.terraform.io/>`_. .. versionadded:: 2.1
625990712ae34c7f260ac9b0
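A hedged usage sketch, assuming a Pygments installation recent enough to ship this lexer (2.1 or later per the versionadded note):

from pygments import highlight
from pygments.lexers import TerraformLexer
from pygments.formatters import TerminalFormatter

snippet = '''
resource "aws_instance" "web" {
  ami           = "ami-123456"
  instance_type = "t2.micro"
}
'''
print(highlight(snippet, TerraformLexer(), TerminalFormatter()))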
class DischargeError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> super(DischargeError, self).__init__( 'third party refused discharge: {}'.format(msg))
This is thrown by Client when a third party has refused a discharge
6259907126068e7796d4e201
class Critic(nn.Module): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, seed, fcs1_units=400, fc2_units=300): <NEW_LINE> <INDENT> super(Critic, self).__init__() <NEW_LINE> self.seed = torch.manual_seed(seed) <NEW_LINE> self.fcs1 = nn.Linear(state_size, fcs1_units) <NEW_LINE> self.bn1 = nn.BatchNorm1d(fcs1_units) <NEW_LINE> self.fc2 = nn.Linear(fcs1_units+action_size, fc2_units) <NEW_LINE> self.bn2 = nn.BatchNorm1d(fc2_units) <NEW_LINE> self.fc3 = nn.Linear(fc2_units, 1) <NEW_LINE> self.reset_parameters() <NEW_LINE> <DEDENT> def reset_parameters(self): <NEW_LINE> <INDENT> self.fcs1.weight.data.uniform_(*hidden_init(self.fcs1)) <NEW_LINE> self.fc2.weight.data.uniform_(*hidden_init(self.fc2)) <NEW_LINE> self.fc3.weight.data.uniform_(-3e-3, 3e-3) <NEW_LINE> <DEDENT> def forward(self, state, action): <NEW_LINE> <INDENT> if state.dim() == 1: <NEW_LINE> <INDENT> state = torch.unsqueeze(state,0) <NEW_LINE> <DEDENT> xs = F.relu(self.bn1(self.fcs1(state))) <NEW_LINE> x = torch.cat((xs, action), dim=1) <NEW_LINE> x = F.relu(self.bn2(self.fc2(x))) <NEW_LINE> return self.fc3(x)
Critic (Value) Model.
62599071ec188e330fdfa169
class WebsocketServer(ThreadingMixIn, TCPServer, API): <NEW_LINE> <INDENT> allow_reuse_address = True <NEW_LINE> daemon_threads = True <NEW_LINE> clients = [] <NEW_LINE> id_counter = 0 <NEW_LINE> def __init__(self, port, host='127.0.0.1', loglevel=logging.WARNING): <NEW_LINE> <INDENT> logger.setLevel(loglevel) <NEW_LINE> TCPServer.__init__(self, (host, port), WebSocketHandler) <NEW_LINE> self.port = self.socket.getsockname()[1] <NEW_LINE> <DEDENT> def _message_received_(self, handler, msg): <NEW_LINE> <INDENT> self.message_received(self.handler_to_client(handler), self, msg) <NEW_LINE> <DEDENT> def _ping_received_(self, handler, msg): <NEW_LINE> <INDENT> handler.send_pong(msg) <NEW_LINE> <DEDENT> def _pong_received_(self, handler, msg): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _new_client_(self, handler): <NEW_LINE> <INDENT> self.id_counter += 1 <NEW_LINE> client = { 'id': self.id_counter, 'handler': handler, 'address': handler.client_address } <NEW_LINE> self.clients.append(client) <NEW_LINE> self.new_client(client, self) <NEW_LINE> <DEDENT> def _client_left_(self, handler): <NEW_LINE> <INDENT> client = self.handler_to_client(handler) <NEW_LINE> self.client_left(client, self) <NEW_LINE> if client in self.clients: <NEW_LINE> <INDENT> self.clients.remove(client) <NEW_LINE> <DEDENT> <DEDENT> def _unicast_(self, to_client, msg): <NEW_LINE> <INDENT> to_client['handler'].send_message(msg) <NEW_LINE> <DEDENT> def _multicast_(self, msg): <NEW_LINE> <INDENT> for client in self.clients: <NEW_LINE> <INDENT> self._unicast_(client, msg) <NEW_LINE> <DEDENT> <DEDENT> def handler_to_client(self, handler): <NEW_LINE> <INDENT> for client in self.clients: <NEW_LINE> <INDENT> if client['handler'] == handler: <NEW_LINE> <INDENT> return client
A websocket server waiting for clients to connect. Args: port(int): Port to bind to host(str): Hostname or IP to listen for connections. By default 127.0.0.1 is used. To accept connections from any client, use 0.0.0.0. loglevel: Logging level from the logging module to use for logging. By default warnings and errors are logged. Properties: clients(list): A list of connected clients. A client is a dictionary like below. { 'id' : id, 'handler' : handler, 'address' : (addr, port) }
625990718e7ae83300eea957
class PictureDownload(ImagesPipeline): <NEW_LINE> <INDENT> def __init__(self, pool, table_name, download_func=None, settings=None): <NEW_LINE> <INDENT> self.pool = pool <NEW_LINE> self.table_name = table_name <NEW_LINE> if isinstance(settings, dict) or settings is None: <NEW_LINE> <INDENT> settings = Settings(settings) <NEW_LINE> <DEDENT> resolve = functools.partial(self._key_for_pipe, base_class_name="ImagesPipeline", settings=settings) <NEW_LINE> self.expires = settings.getint( resolve("IMAGES_EXPIRES"), self.EXPIRES ) <NEW_LINE> if not hasattr(self, "IMAGES_RESULT_FIELD"): <NEW_LINE> <INDENT> self.IMAGES_RESULT_FIELD = self.DEFAULT_IMAGES_RESULT_FIELD <NEW_LINE> <DEDENT> if not hasattr(self, "IMAGES_URLS_FIELD"): <NEW_LINE> <INDENT> self.IMAGES_URLS_FIELD = self.DEFAULT_IMAGES_URLS_FIELD <NEW_LINE> <DEDENT> self.images_urls_field = settings.get( resolve('IMAGES_URLS_FIELD'), self.IMAGES_URLS_FIELD ) <NEW_LINE> self.images_result_field = settings.get( resolve('IMAGES_RESULT_FIELD'), self.IMAGES_RESULT_FIELD ) <NEW_LINE> self.min_width = settings.getint( resolve('IMAGES_MIN_WIDTH'), self.MIN_WIDTH ) <NEW_LINE> self.min_height = settings.getint( resolve('IMAGES_MIN_HEIGHT'), self.MIN_HEIGHT ) <NEW_LINE> self.thumbs = settings.get( resolve('IMAGES_THUMBS'), self.THUMBS ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_settings(cls, settings): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pool = happybase.ConnectionPool(size=5, protocol='compact', transport='framed', host=settings["HBASE_HOSTS"], autoconnect=False) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> sys.exit(1) <NEW_LINE> <DEDENT> return cls(pool, settings['HBASE_TABLE2']) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_crawler(cls, crawler): <NEW_LINE> <INDENT> return cls.from_settings(crawler.settings) <NEW_LINE> <DEDENT> def image_downloaded(self, response, request, info): <NEW_LINE> <INDENT> print("*\n" * 5, "正在下载图片") <NEW_LINE> checksum = None <NEW_LINE> for path, image, buf in self.get_images(response, request, info): <NEW_LINE> <INDENT> if checksum is None: <NEW_LINE> <INDENT> buf.seek(0) <NEW_LINE> checksum = md5sum(buf) <NEW_LINE> <DEDENT> time = self._get_time() <NEW_LINE> try: <NEW_LINE> <INDENT> with self.pool.connection() as connection: <NEW_LINE> <INDENT> table = connection.table(self.table_name) <NEW_LINE> table.put(path, {"cf:content": buf.getvalue(), "cf:size": "880X600"}) <NEW_LINE> connection.close() <NEW_LINE> print("successfully storing image into hbase,{time},{id}".format(type=type, time=time, id=path)) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Caught Hbase exception of image storing:{e}".format(e=str(e))) <NEW_LINE> print("failed storing image into hbase,{time},{id}".format(type=type, time=time, id=path)) <NEW_LINE> <DEDENT> <DEDENT> return checksum <NEW_LINE> <DEDENT> def file_path(self, request, response=None, info=None): <NEW_LINE> <INDENT> super(PictureDownload, self).file_path(request, response, info) <NEW_LINE> image_guid = request.url <NEW_LINE> return request.url
Inherits from the image download pipeline ImagesPipeline.
62599071cc0a2c111447c734
class UserAnswer(models.Model): <NEW_LINE> <INDENT> reply = models.ForeignKey(Reply, on_delete=models.CASCADE) <NEW_LINE> answer = models.ForeignKey(Answer, on_delete=models.CASCADE, default=1)
User answer class: a user's chosen answer option for a test question. - reply - the user's reply - answer - the answer option selected from the test
62599071796e427e5385003f
class GameState: <NEW_LINE> <INDENT> WIN = "Win!" <NEW_LINE> LOSS = "Loss!" <NEW_LINE> DRAW = "Draw!" <NEW_LINE> IN_PROGRESS = "Still playing..." <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "< Tic Tac Toe Game States >"
Game End States
62599071e1aae11d1e7cf470
@attr.s(auto_attribs=True, init=False) <NEW_LINE> class DashboardAggregateTableLookml(model.Model): <NEW_LINE> <INDENT> dashboard_id: Optional[str] = None <NEW_LINE> aggregate_table_lookml: Optional[str] = None <NEW_LINE> def __init__( self, *, dashboard_id: Optional[str] = None, aggregate_table_lookml: Optional[str] = None ): <NEW_LINE> <INDENT> self.dashboard_id = dashboard_id <NEW_LINE> self.aggregate_table_lookml = aggregate_table_lookml
Attributes: dashboard_id: Dashboard Id aggregate_table_lookml: Aggregate Table LookML
6259907123849d37ff85297d
class _GzipMessageDelegate(httputil.HTTPMessageDelegate): <NEW_LINE> <INDENT> def __init__(self, delegate, chunk_size): <NEW_LINE> <INDENT> self._delegate = delegate <NEW_LINE> self._chunk_size = chunk_size <NEW_LINE> self._decompressor = None <NEW_LINE> <DEDENT> def headers_received(self, start_line, headers): <NEW_LINE> <INDENT> if headers.get("Content-Encoding") == "gzip": <NEW_LINE> <INDENT> self._decompressor = GzipDecompressor() <NEW_LINE> headers.add("X-Consumed-Content-Encoding", headers["Content-Encoding"]) <NEW_LINE> del headers["Content-Encoding"] <NEW_LINE> <DEDENT> return self._delegate.headers_received(start_line, headers) <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def data_received(self, chunk): <NEW_LINE> <INDENT> if self._decompressor: <NEW_LINE> <INDENT> compressed_data = chunk <NEW_LINE> while compressed_data: <NEW_LINE> <INDENT> decompressed = self._decompressor.decompress( compressed_data, self._chunk_size) <NEW_LINE> if decompressed: <NEW_LINE> <INDENT> yield gen.maybe_future( self._delegate.data_received(decompressed)) <NEW_LINE> <DEDENT> compressed_data = self._decompressor.unconsumed_tail <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> yield gen.maybe_future(self._delegate.data_received(chunk)) <NEW_LINE> <DEDENT> <DEDENT> def finish(self): <NEW_LINE> <INDENT> if self._decompressor is not None: <NEW_LINE> <INDENT> tail = self._decompressor.flush() <NEW_LINE> if tail: <NEW_LINE> <INDENT> self._delegate.data_received(tail) <NEW_LINE> <DEDENT> <DEDENT> return self._delegate.finish()
Wraps an `HTTPMessageDelegate` to decode ``Content-Encoding: gzip``.
6259907156b00c62f0fb4196
class Debitor(JSONMixin, object): <NEW_LINE> <INDENT> def __init__(self, typ=None, name=None, description=None, integration_data=None, aggregation_key=None): <NEW_LINE> <INDENT> self.type = typ <NEW_LINE> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.integration_data = integration_data <NEW_LINE> self.aggregation_key = aggregation_key <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_api_data(cls, data): <NEW_LINE> <INDENT> kwargs = { 'typ': data.get('debitor_type'), 'name': data.get('debitor_name'), 'description': data.get('debitor_desc'), 'integration_data': data.get('debitor_integration_data', {}), 'aggregation_key': data.get('debitor_aggregation_key'), } <NEW_LINE> return cls(**kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return u'<Debitor {}:{}>'.format(self.type, self.name)
Information about a 3rd party that will take payment from your customer. This information is primarily used for bypassing callouts and integrating directly with payment providers on the front end. When your account is set up to sell on credit (i.e. you are always taking payment from the customer in your application directly), this information will not be present, and it should not be relevant. When the source system is taking payment, this information will also not be present. When debitor information is not present, or you are not front-end integrating, you should refer to the :attr:`Reservation.needs_payment_card <pyticketswitch.reservation.Reservation.needs_payment_card>`, :attr:`Reservation.needs_email_address <pyticketswitch.reservation.Reservation.needs_email_address>`, and :attr:`Reservation.needs_agent_reference <pyticketswitch.reservation.Reservation.needs_agent_reference>` attributes as to what information you need to pass back to the API for purchasing tickets. Regardless of the debitor, it's advisable to implement the full purchase/callout/callback process in the event that your front end integration goes awry. :ref:`See front end integrations for more information <frontend_integrations>` Attributes: type (str): all debitors with the same type can be assumed to integrate in the same manner; however, their parameters and integration data might be different. name (str): name of the specific implementation of the debitor. description (str): human readable description of the debitor. integration_data (dict): data used to do front end integrations. For the format of this data please consult :ref:`the documentation for the relevant debitor type <frontend_integrations>`. aggregation_key (str): a key used to identify if debitors are the same for purposes of payment aggregation between bundles
62599071fff4ab517ebcf0e1
class neoHookean(MaterialModel): <NEW_LINE> <INDENT> def model_info(self): <NEW_LINE> <INDENT> self.num_parameters = 2 <NEW_LINE> self.kinematic_measure = "RightCauchyGreen" <NEW_LINE> <DEDENT> def strain_energy(self, parameters): <NEW_LINE> <INDENT> J = fenics.sqrt(fenics.det(self.C)) <NEW_LINE> Cbar = J ** (-2.0 / 3.0) * self.C <NEW_LINE> half_nkT, bulk = parameters['half_nkT'], parameters['bulk'] <NEW_LINE> return half_nkT * (fenics.tr(Cbar) - 3.0) + bulk * (J - 1.0) ** 2
Defines the strain energy function for a neo-Hookean material
62599071442bda511e95d9bb
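In conventional notation the strain_energy method above evaluates a compressible neo-Hookean energy; writing c_1 for the half_nkT parameter and \kappa for bulk (a relabelling for readability only, not part of the original):

W(\mathbf{C}) = c_1\left(\operatorname{tr}\bar{\mathbf{C}} - 3\right) + \kappa\,(J - 1)^2,
\qquad J = \sqrt{\det\mathbf{C}},
\qquad \bar{\mathbf{C}} = J^{-2/3}\,\mathbf{C}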
class PixelPrediction(object): <NEW_LINE> <INDENT> def __init__(self, id_val, pixel_number, k): <NEW_LINE> <INDENT> self.id = id_val <NEW_LINE> self.pixel_number = pixel_number <NEW_LINE> self.k = k <NEW_LINE> self._predicted_arr = None <NEW_LINE> self._neighbors = None <NEW_LINE> self._distances = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{kls}(\n id={id},\n neighbors={n},\n distances={d}\n)'.format( kls=self.__class__.__name__, id=self.id, n=self.neighbors, d=self.distances ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def neighbors(self): <NEW_LINE> <INDENT> return self._neighbors <NEW_LINE> <DEDENT> @neighbors.setter <NEW_LINE> def neighbors(self, neighbors): <NEW_LINE> <INDENT> self._neighbors = neighbors <NEW_LINE> <DEDENT> @property <NEW_LINE> def distances(self): <NEW_LINE> <INDENT> return self._distances <NEW_LINE> <DEDENT> @distances.setter <NEW_LINE> def distances(self, distances): <NEW_LINE> <INDENT> self._distances = distances <NEW_LINE> <DEDENT> def get_predicted_attrs(self): <NEW_LINE> <INDENT> return self._predicted_arr <NEW_LINE> <DEDENT> def set_predicted_attrs(self, arr): <NEW_LINE> <INDENT> self._predicted_arr = arr
Class to hold a given pixel's prediction including neighbor IDs, distances and predicted values for each continuous attribute.
625990714e4d562566373ccd
class AfterEvent(BaseEvent): <NEW_LINE> <INDENT> def __init__(self, interval, callback, args=None, kwargs=None, userid=0, obj=None): <NEW_LINE> <INDENT> super().__init__(callback, args, kwargs, userid, obj) <NEW_LINE> self._interval = interval <NEW_LINE> self.eventid = self._obj.SetEventAfter(interval, userid) <NEW_LINE> _register[self.eventid] = self <NEW_LINE> <DEDENT> def exec(self): <NEW_LINE> <INDENT> del _register[self.eventid] <NEW_LINE> return self._callback(*self._args, **self._kwargs)
An ATENXA-style event executed after a specified delay. Runs callback with arguments args and keyword arguments kwargs after interval seconds. If args or kwargs is None, an empty list is used. Args: interval: time in seconds callback: the function to execute args (optional): arguments passed when the function runs kwargs (optional): keyword arguments passed when the function runs userid (optional): user ID obj (optional): the object the event is raised on. (default=LAYOUT) Example: run SetTimerVoltage on the formation object trn after 1.5 seconds >>> AfterEvent(1.5, trn.SetTimerVoltage, args=(5.0, 0.2)) When the specified time arrives, the following is executed: >>> trn.SetTimerVoltage(5.0, 0.2) A user-defined function or a class method can also be passed as callback.
62599071ac7a0e7691f73db0
class VideoBlackFailoverSettings(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "BlackDetectThreshold": (double, False), "VideoBlackThresholdMsec": (integer, False), }
`VideoBlackFailoverSettings <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-medialive-channel-videoblackfailoversettings.html>`__
6259907121bff66bcd72452e
class Validator: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def validate(self, value, context=''): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> is_numeric = False
Base class for all value validators. Each should have its own constructor, and override: validate: a function of two args, value and context; value is what you're testing, and context is a string identifying the caller. Raises an error (TypeError or ValueError) if the value fails. is_numeric: is this a numeric type (so it can be swept)?
62599071a05bb46b3848bd8f
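A minimal subclass sketch following the contract described above, assuming the Validator base class from the record is in scope; the concrete class name and bounds are illustrative only:

class IntRange(Validator):
    """Hypothetical validator: accepts ints within [min_value, max_value]."""
    is_numeric = True

    def __init__(self, min_value=0, max_value=100):
        self.min_value = min_value
        self.max_value = max_value

    def validate(self, value, context=''):
        if not isinstance(value, int):
            raise TypeError('{!r} is not an int; {}'.format(value, context))
        if not self.min_value <= value <= self.max_value:
            raise ValueError('{} is out of range; {}'.format(value, context))

IntRange(0, 10).validate(7, context='setting channel')     # passes silently
# IntRange(0, 10).validate(42, context='setting channel')  # would raise ValueError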
class PartyTestsDatabaseNotLoggedIn(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = app.test_client() <NEW_LINE> app.config['TESTING'] = True <NEW_LINE> connect_to_db(app, "postgresql:///testdb") <NEW_LINE> db.create_all() <NEW_LINE> example_data() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> db.session.close() <NEW_LINE> db.drop_all() <NEW_LINE> <DEDENT> def test_games(self): <NEW_LINE> <INDENT> result = self.client.get("/games", follow_redirects=True) <NEW_LINE> self.assertIn("board games, rainbows, and ice cream", result.data)
Flask tests that use the database.
6259907176e4537e8c3f0e46
class Log: <NEW_LINE> <INDENT> stdlog = logging.getLogger('stdlog') <NEW_LINE> def getlog(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> numeric_level = getattr(logging, cfg['LOGLEVEL'].upper(), None) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('Invalid log level, using default INFO') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.stdlog.setLevel(numeric_level) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> print('Invalid log level: ' + cfg['LOGLEVEL'] + ', using default') <NEW_LINE> self.cfg['LOGLEVEL'] = 'INFO' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> rh = logging.handlers.RotatingFileHandler(cfg['LOGFILE'], maxBytes=5242880, backupCount=3) <NEW_LINE> sh = logging.StreamHandler() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> print('Cannot write to ' + cfg['LOGFILE'] + ', (are you root?)') <NEW_LINE> <DEDENT> if self.stdlog.isEnabledFor(logging.DEBUG): <NEW_LINE> <INDENT> formatter = logging.Formatter('%(message)s') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', "%Y-%m-%d %H:%M:%S") <NEW_LINE> <DEDENT> rh.setFormatter(formatter) <NEW_LINE> sh.setFormatter(formatter) <NEW_LINE> self.stdlog.addHandler(rh) <NEW_LINE> self.stdlog.addHandler(sh) <NEW_LINE> return self.stdlog
Initialize the opendmp logging system. It actually prints both to stdout and to a LOGFILE defined in opendmp.conf.
62599071009cb60464d02dff
class ShrewTopo(Topo): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Topo.__init__(self) <NEW_LINE> attacker = self.addHost('attacker') <NEW_LINE> server = self.addHost('server') <NEW_LINE> client = self.addHost('client') <NEW_LINE> attacker_friend = self.addHost('friend') <NEW_LINE> server_switch = self.addSwitch('s1') <NEW_LINE> client_switch = self.addSwitch('s2') <NEW_LINE> self.addLink(attacker, server_switch, bw=1.5, delay='2ms', max_queue_size=15) <NEW_LINE> self.addLink(server, server_switch, bw=1.5, delay='2ms', max_queue_size=15) <NEW_LINE> self.addLink(client, client_switch, bw=1.5, delay='2ms', max_queue_size=15) <NEW_LINE> self.addLink(attacker_friend, client_switch, bw=1.5, delay='2ms', max_queue_size=15) <NEW_LINE> self.addLink(server_switch, client_switch, bw=1.5, delay='2ms', max_queue_size=15)
Simple topology for bufferbloat experiment.
625990714e4d562566373cce
class DetectionLayer(KE.Layer): <NEW_LINE> <INDENT> def __init__(self, config=None, **kwargs): <NEW_LINE> <INDENT> super(DetectionLayer, self).__init__(**kwargs) <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> def wrapper(rois, mrcnn_class, mrcnn_bbox, image_meta): <NEW_LINE> <INDENT> b = 0 <NEW_LINE> _, _, window, _ = parse_image_meta(image_meta) <NEW_LINE> detections = refine_detections( rois[b], mrcnn_class[b], mrcnn_bbox[b], window[b], self.config) <NEW_LINE> gap = self.config.DETECTION_MAX_INSTANCES - detections.shape[0] <NEW_LINE> assert gap >= 0 <NEW_LINE> if gap > 0: <NEW_LINE> <INDENT> detections = np.pad(detections, [(0, gap), (0, 0)], 'constant', constant_values=0) <NEW_LINE> <DEDENT> detections = detections.astype(np.float32) <NEW_LINE> return np.reshape(detections, [1, self.config.DETECTION_MAX_INSTANCES, 6]) <NEW_LINE> <DEDENT> return tf.py_func(wrapper, inputs, tf.float32) <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return (None, self.config.DETECTION_MAX_INSTANCES, 6)
Takes classified proposal boxes and their bounding box deltas and returns the final detection boxes. # TODO: Add support for batch_size > 1 Returns: [batch, num_detections, (y1, x1, y2, x2, class_score)] in pixels
6259907166673b3332c31cc5
class Log(object): <NEW_LINE> <INDENT> def __init__(self, filename='', log_dir='', console_output=False): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.baseDir = 'C:\\temp' if is_windows() else '/tmp' <NEW_LINE> if not log_dir: <NEW_LINE> <INDENT> user = os.environ.get('USER') or os.environ.get('USERNAME') or 'noname' <NEW_LINE> log_dir = os.path.join(self.baseDir, 'run-%s-%s-%s' % (user, int(time.time()), os.getpid())) <NEW_LINE> <DEDENT> if not os.path.isdir(log_dir): <NEW_LINE> <INDENT> err_msg = "log dir does not exist. could not proceed." <NEW_LINE> raise Exception(err_msg) <NEW_LINE> <DEDENT> self.logDir = log_dir <NEW_LINE> self.logPath = os.path.join(log_dir, filename) <NEW_LINE> self.name = os.path.splitext(filename)[0] <NEW_LINE> self.logFormat = MyFormatter('%(asctime)s [%(levelname)s %(filename)s::' '%(funcName)s::%(lineno)s::%(threadName)s] %(message)s') <NEW_LINE> self.log = logging.getLogger(self.name) <NEW_LINE> self.log.level = logging.DEBUG <NEW_LINE> self.streamHandler = None <NEW_LINE> self.fileHandler = None <NEW_LINE> self.create_handlers(console_output) <NEW_LINE> self.add_handlers() <NEW_LINE> self.log.info('Command line arguments: %s' % str(sys.argv)) <NEW_LINE> <DEDENT> def create_handlers(self, console_output=False): <NEW_LINE> <INDENT> if console_output: <NEW_LINE> <INDENT> self.streamHandler = logging.StreamHandler(stream=sys.stdout) <NEW_LINE> self.streamHandler.setLevel(logging.DEBUG) <NEW_LINE> self.streamHandler.setFormatter(self.logFormat) <NEW_LINE> <DEDENT> self.fileHandler = logging.FileHandler(self.logPath, mode='a') <NEW_LINE> self.fileHandler.setLevel(logging.DEBUG) <NEW_LINE> self.fileHandler.setFormatter(self.logFormat) <NEW_LINE> <DEDENT> def add_handlers(self): <NEW_LINE> <INDENT> self.log.addHandler(self.fileHandler) <NEW_LINE> if self.streamHandler: <NEW_LINE> <INDENT> self.log.addHandler(self.streamHandler) <NEW_LINE> <DEDENT> <DEDENT> def remove_handlers(self): <NEW_LINE> <INDENT> self.log.removeHandler(self.fileHandler) <NEW_LINE> if self.streamHandler: <NEW_LINE> <INDENT> self.log.removeHandler(self.streamHandler)
Log class for the top-level runUpgrade process and all of its scripts.
625990711f037a2d8b9e54ce
class ListOrCreateAssignmentRest(BaseNodeListOrCreateView): <NEW_LINE> <INDENT> permissions = (IsAuthenticated,) <NEW_LINE> resource = AssignmentResource <NEW_LINE> def authenticate_postrequest(self, user, parentnode_id): <NEW_LINE> <INDENT> periodadmin_required(user, parentnode_id) <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> qry = super(ListOrCreateAssignmentRest, self).get_queryset() <NEW_LINE> qry = qry.order_by('-publishing_time') <NEW_LINE> return qry
List the subjects where the authenticated user is admin.
6259907171ff763f4b5e9070
class UnrecognizableSourceLanguageError(MerlinError): <NEW_LINE> <INDENT> description = "{0.node.uri}: could not determine the source language" <NEW_LINE> def __init__(self, node, **kwds): <NEW_LINE> <INDENT> super().__init__(**kwds) <NEW_LINE> self.node = node <NEW_LINE> return
Exception raised when the source language of an asset could not be recognized
62599071bf627c535bcb2d93
class Stack(object, ICollection, IEnumerable, ICloneable): <NEW_LINE> <INDENT> def Clear(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Clone(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Contains(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def CopyTo(self, array, index): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def GetEnumerator(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Peek(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Pop(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Push(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def Synchronized(stack): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ToArray(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __iter__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __len__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(self, *__args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __reduce_ex__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __repr__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Count = property(lambda self: object(), lambda self, v: None, lambda self: None) <NEW_LINE> IsSynchronized = property(lambda self: object(), lambda self, v: None, lambda self: None) <NEW_LINE> SyncRoot = property(lambda self: object(), lambda self, v: None, lambda self: None)
Represents a simple last-in-first-out (LIFO) non-generic collection of objects. Stack() Stack(initialCapacity: int) Stack(col: ICollection)
625990714428ac0f6e659dfc
class Direction1Enum(object): <NEW_LINE> <INDENT> ENUM_IN = 'in' <NEW_LINE> OUT = 'out' <NEW_LINE> BOTH = 'both'
Implementation of the 'Direction1' enum. The leg of the call that the audio will be played to. Attributes: IN: TODO: type description here. OUT: TODO: type description here. BOTH: TODO: type description here.
62599071baa26c4b54d50b73
class TestApiResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testApiResponse(self): <NEW_LINE> <INDENT> model = mparticle.models.api_response.ApiResponse()
ApiResponse unit test stubs
62599071d268445f2663a7c1
class ATMPort(Port): <NEW_LINE> <INDENT> def __init__(self, name, nio=None): <NEW_LINE> <INDENT> super().__init__(name, nio) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def longNameType(): <NEW_LINE> <INDENT> return "ATM" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def shortNameType(): <NEW_LINE> <INDENT> return "a" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def linkType(): <NEW_LINE> <INDENT> return "Serial" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def dataLinkTypes(): <NEW_LINE> <INDENT> return {"ATM": "DLT_ATM_RFC1483"}
ATM port. :param name: port name (string) :param nio: NIO instance to attach to this port
6259907138b623060ffaa4b8
class CF(object): <NEW_LINE> <INDENT> def __init__(self, justify='>', width=0): <NEW_LINE> <INDENT> self.justify = justify <NEW_LINE> self.width = width <NEW_LINE> <DEDENT> def format(self, s): <NEW_LINE> <INDENT> fmtstr = '{:' + self.justify + str(self.width) + "s}" <NEW_LINE> return fmtstr.format(s) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{:' + self.justify + str(self.width) + "s}"
CF instances represent preferred column width and justification.
625990717b180e01f3e49cc8
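A minimal usage sketch for the CF column formatter above; it assumes the CF class definition from the preceding entry is available in the calling script (nothing here comes from the original repository):

# Hypothetical usage of the CF column formatter shown above.
cf = CF(justify='<', width=12)
print(repr(cf))           # {:<12s}
print(cf.format('name'))  # 'name' left-justified and padded to 12 characters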
class Energy(EcobeeObject): <NEW_LINE> <INDENT> __slots__ = [ '_tou', '_energy_feature_state', '_feels_like_mode', '_comfort_preferences', ] <NEW_LINE> attribute_name_map = { 'tou': 'tou', 'energy_feature_state': 'energyFeatureState', 'energyFeatureState': 'energy_feature_state', 'feels_like_mode': 'feelsLikeMode', 'feelsLikeMode': 'feels_like_mode', 'comfort_preferences': 'comfortPreferences', 'comfortPreferences': 'comfort_preferences', } <NEW_LINE> attribute_type_map = { 'tou': 'TimeOfUse', 'energy_feature_state': 'six.text_type', 'feels_like_mode': 'six.text_type', 'comfort_preferences': 'six.text_type', } <NEW_LINE> def __init__( self, tou=None, energy_feature_state=None, feels_like_mode=None, comfort_preferences=None, ): <NEW_LINE> <INDENT> self._tou = tou <NEW_LINE> self._energy_feature_state = energy_feature_state <NEW_LINE> self._feels_like_mode = feels_like_mode <NEW_LINE> self._comfort_preferences = comfort_preferences <NEW_LINE> <DEDENT> @property <NEW_LINE> def tou(self): <NEW_LINE> <INDENT> return self._tou <NEW_LINE> <DEDENT> @property <NEW_LINE> def energy_feature_state(self): <NEW_LINE> <INDENT> return self._energy_feature_state <NEW_LINE> <DEDENT> @property <NEW_LINE> def feels_like_mode(self): <NEW_LINE> <INDENT> return self._feels_like_mode <NEW_LINE> <DEDENT> @property <NEW_LINE> def comfort_preferences(self): <NEW_LINE> <INDENT> return self._comfort_preferences
This class has been manually generated by reverse engineering. Attribute names have been generated by converting ecobee property names from camelCase to snake_case. A getter property has been generated for each attribute. A setter property has been generated for each attribute whose value of READONLY is "no". An __init__ argument without a default value has been generated if the value of REQUIRED is "yes". An __init__ argument with a default value of None has been generated if the value of REQUIRED is "no".
625990712c8b7c6e89bd50af
class DublinPublicTransportSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, data, stop, route, name): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self._name = name <NEW_LINE> self._stop = stop <NEW_LINE> self._route = route <NEW_LINE> self._times = self._state = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> if self._times is not None: <NEW_LINE> <INDENT> next_up = "None" <NEW_LINE> if len(self._times) > 1: <NEW_LINE> <INDENT> next_up = self._times[1][ATTR_ROUTE] + " in " <NEW_LINE> next_up += self._times[1][ATTR_DUE_IN] <NEW_LINE> <DEDENT> return { ATTR_DUE_IN: self._times[0][ATTR_DUE_IN], ATTR_DUE_AT: self._times[0][ATTR_DUE_AT], ATTR_STOP_ID: self._stop, ATTR_ROUTE: self._times[0][ATTR_ROUTE], ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_NEXT_UP: next_up, } <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return TIME_MINUTES <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return ICON <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.data.update() <NEW_LINE> self._times = self.data.info <NEW_LINE> try: <NEW_LINE> <INDENT> self._state = self._times[0][ATTR_DUE_IN] <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> pass
Implementation of a Dublin public transport sensor.
6259907123849d37ff85297f
class PayeeWrapper(object): <NEW_LINE> <INDENT> swagger_types = { 'payee': 'Payee' } <NEW_LINE> attribute_map = { 'payee': 'payee' } <NEW_LINE> def __init__(self, payee=None): <NEW_LINE> <INDENT> self._payee = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.payee = payee <NEW_LINE> <DEDENT> @property <NEW_LINE> def payee(self): <NEW_LINE> <INDENT> return self._payee <NEW_LINE> <DEDENT> @payee.setter <NEW_LINE> def payee(self, payee): <NEW_LINE> <INDENT> if payee is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `payee`, must not be `None`") <NEW_LINE> <DEDENT> self._payee = payee <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(PayeeWrapper, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PayeeWrapper): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259907121bff66bcd724530
class ResourceType(ModelBase): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> new = super(ResourceType, cls).__new__ <NEW_LINE> if attrs.pop("abstract", None) or not attrs.get("autoregister", True): <NEW_LINE> <INDENT> return new(cls, name, bases, attrs) <NEW_LINE> <DEDENT> for fname, vname, help in attrs.get("translatable_fields", []): <NEW_LINE> <INDENT> attrs[fname] = models.TextField(vname, null=True, blank=True, help_text=help) <NEW_LINE> <DEDENT> return new(cls, name, bases, attrs) <NEW_LINE> <DEDENT> def __repr__(cls): <NEW_LINE> <INDENT> return "<class %s>" % cls.__name__
Metaclass for archival resources. Don't fear the magic. All this does is instantiate models.TextField attributes on subclasses based on their translatable_fields tuple.
625990714527f215b58eb604
class OptProblem(ArchitectureAssembly): <NEW_LINE> <INDENT> solution = Dict({},iotype="in",desc="dictionary of expected values for " "all des_vars and coupling_vars") <NEW_LINE> def check_solution(self,strict=False): <NEW_LINE> <INDENT> error = {} <NEW_LINE> try: <NEW_LINE> <INDENT> for k,v in self.get_parameters().iteritems(): <NEW_LINE> <INDENT> sol = self.solution[k] <NEW_LINE> error[k] = v.evaluate() - sol <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if strict: <NEW_LINE> <INDENT> self.raise_exception("No solution was given for the des_var %s"%str(k),ValueError) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> for k,v in self.get_coupling_vars().iteritems(): <NEW_LINE> <INDENT> sol = self.solution[k] <NEW_LINE> error[k] = (v.indep.evaluate()-sol, v.dep.evaluate()-sol) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if strict: <NEW_LINE> <INDENT> self.raise_exception("No solution was given for the coupling_var %s"%str(k),ValueError) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> for k,v in self.get_objectives().iteritems(): <NEW_LINE> <INDENT> sol = self.solution[k] <NEW_LINE> error[k] = v.evaluate()-sol <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if strict: <NEW_LINE> <INDENT> self.raise_exception("No solution was given for the objective %s"%str(k), ValueError) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return error
Class for specifying test problems for optimization algorithms and architectures.
62599071283ffb24f3cf5172
class AzureStackManagementClient(object): <NEW_LINE> <INDENT> def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, base_url: Optional[str] = None, **kwargs: Any ) -> None: <NEW_LINE> <INDENT> if not base_url: <NEW_LINE> <INDENT> base_url = 'https://management.azure.com' <NEW_LINE> <DEDENT> self._config = AzureStackManagementClientConfiguration(credential, subscription_id, **kwargs) <NEW_LINE> self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._serialize.client_side_validation = False <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.operations = Operations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.cloud_manifest_file = CloudManifestFileOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.customer_subscriptions = CustomerSubscriptionsOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.products = ProductsOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.registrations = RegistrationsOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.linked_subscriptions = LinkedSubscriptionsOperations( self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> <DEDENT> async def close(self) -> None: <NEW_LINE> <INDENT> await self._client.close() <NEW_LINE> <DEDENT> async def __aenter__(self) -> "AzureStackManagementClient": <NEW_LINE> <INDENT> await self._client.__aenter__() <NEW_LINE> return self <NEW_LINE> <DEDENT> async def __aexit__(self, *exc_details) -> None: <NEW_LINE> <INDENT> await self._client.__aexit__(*exc_details)
Azure Stack. :ivar operations: Operations operations :vartype operations: azure.mgmt.azurestack.aio.operations.Operations :ivar cloud_manifest_file: CloudManifestFileOperations operations :vartype cloud_manifest_file: azure.mgmt.azurestack.aio.operations.CloudManifestFileOperations :ivar customer_subscriptions: CustomerSubscriptionsOperations operations :vartype customer_subscriptions: azure.mgmt.azurestack.aio.operations.CustomerSubscriptionsOperations :ivar products: ProductsOperations operations :vartype products: azure.mgmt.azurestack.aio.operations.ProductsOperations :ivar registrations: RegistrationsOperations operations :vartype registrations: azure.mgmt.azurestack.aio.operations.RegistrationsOperations :ivar linked_subscriptions: LinkedSubscriptionsOperations operations :vartype linked_subscriptions: azure.mgmt.azurestack.aio.operations.LinkedSubscriptionsOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: Subscription credentials that uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. :type subscription_id: str :param str base_url: Service URL
625990711b99ca400229019a
class DualLayout(BaseLayout, FundamentalTool): <NEW_LINE> <INDENT> _name = "Dual" <NEW_LINE> _args = [Arg("helper_vertices",Arg.bool,"add helper vertices if there are many-to-many vertices", default=True)] <NEW_LINE> def get_subgraph(self, particle): <NEW_LINE> <INDENT> if particle.initial_state: <NEW_LINE> <INDENT> return "initial" <NEW_LINE> <DEDENT> <DEDENT> def get_particle(self, particle): <NEW_LINE> <INDENT> lo = LayoutNode(particle) <NEW_LINE> lo.subgraph = self.get_subgraph(particle) <NEW_LINE> lo.label = particle.pdgid <NEW_LINE> lo.label_size = self.options["label_size"] <NEW_LINE> if particle.initial_state: <NEW_LINE> <INDENT> lo.width = lo.height = 1.0 <NEW_LINE> <DEDENT> elif "cluster" in particle.tags: <NEW_LINE> <INDENT> lo.label = "cluster (%.4g %seV)" % self.graph.units.pick_mag(particle.pt) <NEW_LINE> <DEDENT> elif "cut_summary" in particle.tags: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif "jet" in particle.tags: <NEW_LINE> <INDENT> jet_id = 0 <NEW_LINE> for tag in particle.tags: <NEW_LINE> <INDENT> if tag != 'jet' and tag[:4] == 'jet_': <NEW_LINE> <INDENT> jet_id = int(tag[4:]) + 1 <NEW_LINE> <DEDENT> <DEDENT> lo.label = "jet {0:d} ({1:.4g} {2:s}eV)".format(jet_id, *self.graph.units.pick_mag(particle.pt)) <NEW_LINE> <DEDENT> return lo <NEW_LINE> <DEDENT> def get_vertex(self, vertex, node_style=None): <NEW_LINE> <INDENT> items = [] <NEW_LINE> if "cut_summary" in vertex.tags and len(vertex.incoming) > 1: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> need_help = False <NEW_LINE> if self.options["helper_vertices"]: <NEW_LINE> <INDENT> if len(vertex.incoming) > 1 and len(vertex.outgoing) > 1: <NEW_LINE> <INDENT> need_help = True <NEW_LINE> <DEDENT> elif vertex.vacuum: <NEW_LINE> <INDENT> need_help = True <NEW_LINE> <DEDENT> <DEDENT> if need_help: <NEW_LINE> <INDENT> helper_node = LayoutNode(vertex) <NEW_LINE> helper_node.width = 0.5 <NEW_LINE> helper_node.height = 0.5 <NEW_LINE> items.append(helper_node) <NEW_LINE> for particle in vertex.incoming: <NEW_LINE> <INDENT> items.append(LayoutEdge(vertex, particle, vertex)) <NEW_LINE> <DEDENT> for particle in vertex.outgoing: <NEW_LINE> <INDENT> items.append(LayoutEdge(vertex, vertex, particle)) <NEW_LINE> <DEDENT> return items <NEW_LINE> <DEDENT> for particle in vertex.outgoing: <NEW_LINE> <INDENT> for mother in particle.mothers: <NEW_LINE> <INDENT> items.append(LayoutEdge(vertex, mother, particle)) <NEW_LINE> <DEDENT> <DEDENT> return items
The Dual layout, so named because it is the "Dual" in the graph sense of Feynman diagrams, shows particles as nodes.
625990714e4d562566373cd0
class ValidateResponse(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'error': {'key': 'error', 'type': 'ValidateResponseError'}, } <NEW_LINE> def __init__( self, *, status: Optional[str] = None, error: Optional["ValidateResponseError"] = None, **kwargs ): <NEW_LINE> <INDENT> super(ValidateResponse, self).__init__(**kwargs) <NEW_LINE> self.status = status <NEW_LINE> self.error = error
Describes the result of resource validation. :ivar status: Result of validation. :vartype status: str :ivar error: Error details for the case when validation fails. :vartype error: ~azure.mgmt.web.v2020_09_01.models.ValidateResponseError
62599071627d3e7fe0e08751
class SelectorDIC(ModelSelector): <NEW_LINE> <INDENT> def calculate_score(self, model): <NEW_LINE> <INDENT> scores = [] <NEW_LINE> for word, (X, lengths) in self.hwords.items(): <NEW_LINE> <INDENT> if word != self.this_word: <NEW_LINE> <INDENT> scores.append(model.score(X, lengths)) <NEW_LINE> <DEDENT> <DEDENT> score = model.score(self.X, self.lengths) - np.mean(scores) <NEW_LINE> return score
Select best model based on Discriminative Information Criterion. Biem, Alain. "A model selection criterion for classification: Application to hmm topology optimization." Document Analysis and Recognition, 2003. Proceedings. Seventh International Conference on. IEEE, 2003. http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.58.6208&rep=rep1&type=pdf https://pdfs.semanticscholar.org/ed3d/7c4a5f607201f3848d4c02dd9ba17c791fc2.pdf DIC = log(P(X(i))) - 1/(M-1) * SUM(log(P(X(all but i))))
625990712c8b7c6e89bd50b0
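A cleaner rendering of the selection criterion that calculate_score above implements, assuming M is the number of words in self.hwords and X_j denotes the observation sequence for word j (the symbols are chosen here for illustration, not taken from the cited paper):

\mathrm{DIC} = \log P(X_i) \;-\; \frac{1}{M-1} \sum_{j \neq i} \log P(X_j)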
class StateManager(object): <NEW_LINE> <INDENT> def __init__(self, logger=None): <NEW_LINE> <INDENT> import logging <NEW_LINE> self.handles = [ self._clr_states, None, None, None, None, None, self._del_state, self._del_state, None, None, None, None, None, self._add_state, None, None, ] <NEW_LINE> if logger: <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger = logging.getLogger(__name__) <NEW_LINE> <DEDENT> <DEDENT> def handle_action(self, action, date): <NEW_LINE> <INDENT> id = action.header.action_id <NEW_LINE> if id >= 0 and id < len(self.handles) and self.handles[id]: <NEW_LINE> <INDENT> for m in action.messages: <NEW_LINE> <INDENT> return self.handles[id](m, date) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _clr_states(self, msg, date): <NEW_LINE> <INDENT> self.logger.info("[%s] CLR STATES: %s" % (date, str(msg))) <NEW_LINE> <DEDENT> def _add_state(self, state, date): <NEW_LINE> <INDENT> if state.direction == 2: <NEW_LINE> <INDENT> self.logger.info("[%s] INS STATE: %s" % (date, str(state))) <NEW_LINE> <DEDENT> <DEDENT> def _del_state(self, state, date): <NEW_LINE> <INDENT> self.logger.info("[%s] DEL STATE: %s" % (date, str(state)))
This class is used to manage PF states. It handles all pfsync actions (when parsed into the correct action classes) and logs the needed things. This is the class that you should extend or replace if you need more than pfstatelogger.py provides
6259907167a9b606de547708
class index(app.page): <NEW_LINE> <INDENT> path = '/' <NEW_LINE> def GET(self): <NEW_LINE> <INDENT> return render.index(None)
Main page.
6259907199cbb53fe68327b3
class RunCommandInputSet(InputSet): <NEW_LINE> <INDENT> def set_DatabaseName(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'DatabaseName', value) <NEW_LINE> <DEDENT> def set_Password(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Password', value) <NEW_LINE> <DEDENT> def set_Port(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Port', value) <NEW_LINE> <DEDENT> def set_ResponseFormat(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'ResponseFormat', value) <NEW_LINE> <DEDENT> def set_SQL(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'SQL', value) <NEW_LINE> <DEDENT> def set_Server(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Server', value) <NEW_LINE> <DEDENT> def set_Username(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Username', value)
An InputSet with methods appropriate for specifying the inputs to the RunCommand Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
62599071baa26c4b54d50b75
class Config(object): <NEW_LINE> <INDENT> api_version = 1 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.project = "project" <NEW_LINE> self.project_url = "#" <NEW_LINE> self.repo = None <NEW_LINE> self.pythons = ["{0[0]}.{0[1]}".format(sys.version_info)] <NEW_LINE> self.matrix = {} <NEW_LINE> self.env_dir = "env" <NEW_LINE> self.benchmark_dir = "benchmarks" <NEW_LINE> self.results_dir = "results" <NEW_LINE> self.html_dir = "html" <NEW_LINE> self.show_commit_url = "#" <NEW_LINE> self.hash_length = 8 <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, path=None): <NEW_LINE> <INDENT> if not path: <NEW_LINE> <INDENT> path = "asv.conf.json" <NEW_LINE> <DEDENT> if not os.path.exists(path): <NEW_LINE> <INDENT> raise RuntimeError("Config file {0} not found.".format(path)) <NEW_LINE> <DEDENT> conf = Config() <NEW_LINE> d = util.load_json(path, cls.api_version) <NEW_LINE> conf.__dict__.update(d) <NEW_LINE> if not getattr(conf, "repo", None): <NEW_LINE> <INDENT> raise ValueError( "No repo specified in {0} config file.".format(path)) <NEW_LINE> <DEDENT> return conf <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def update(cls, path=None): <NEW_LINE> <INDENT> if not path: <NEW_LINE> <INDENT> path = "asv.conf.json" <NEW_LINE> <DEDENT> if not os.path.exists(path): <NEW_LINE> <INDENT> raise RuntimeError("Config file {0} not found.".format(path)) <NEW_LINE> <DEDENT> util.update_json(cls, path, cls.api_version)
Manages the configuration for a benchmark project.
625990715fcc89381b266dbc
class Direction(Vector, Enum): <NEW_LINE> <INDENT> N = (0, -1) <NEW_LINE> NE = (1, -1) <NEW_LINE> E = (1, 0) <NEW_LINE> SE = (1, 1) <NEW_LINE> S = (0, 1) <NEW_LINE> SW = (-1, 1) <NEW_LINE> W = (-1, 0) <NEW_LINE> NW = (-1, -1) <NEW_LINE> @property <NEW_LINE> def is_diagonal(self): <NEW_LINE> <INDENT> return all(self.value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_cardinal(self): <NEW_LINE> <INDENT> return not self.is_diagonal <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'<Direction dx={self.dx}, dy={self.dy}>'
Direction on 2D plane.
6259907192d797404e3897c0
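A short sketch of how the Direction enum above might be queried; it assumes the Vector mix-in (not shown in this entry) lets each member's value behave like its (dx, dy) pair, which is what the all(self.value) check implies:

# Illustrative only: relies on the Direction/Vector definitions above.
print(Direction.N.is_cardinal)   # expected True: one component of (0, -1) is zero
print(Direction.NE.is_diagonal)  # expected True: both components of (1, -1) are non-zero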
class RSAKeyPair(object): <NEW_LINE> <INDENT> _crypt_padding = padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None) <NEW_LINE> def __init__(self, rsa_key=None): <NEW_LINE> <INDENT> if rsa_key is None: <NEW_LINE> <INDENT> rsa_key = generate_private_key() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.public_key = rsa_key.public_key() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.public_key = rsa_key <NEW_LINE> self.private_key = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.private_key = rsa_key <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_openssh_public_key(cls, content): <NEW_LINE> <INDENT> rsa_key = loads_public_key(want_bytes(content)) <NEW_LINE> return cls(rsa_key) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_openssh_private_key(cls, content, password): <NEW_LINE> <INDENT> rsa_key = loads_private_key(want_bytes(content), password) <NEW_LINE> return cls(rsa_key) <NEW_LINE> <DEDENT> def encrypt(self, message): <NEW_LINE> <INDENT> message = want_bytes(message) <NEW_LINE> return self.public_key.encrypt(message, self._crypt_padding) <NEW_LINE> <DEDENT> def decrypt(self, ciphertext): <NEW_LINE> <INDENT> if self.private_key is None: <NEW_LINE> <INDENT> raise ValueError('cannot decrypt without a private key') <NEW_LINE> <DEDENT> return self.private_key.decrypt(ciphertext, self._crypt_padding) <NEW_LINE> <DEDENT> def openssh_private_key(self): <NEW_LINE> <INDENT> return dumps_private_key(self.private_key) <NEW_LINE> <DEDENT> def openssh_public_key(self): <NEW_LINE> <INDENT> return dumps_public_key(self.public_key)
Experimental! Use at your own risk.
6259907163b5f9789fe86a2d
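The RSAKeyPair wrapper above relies on helpers (generate_private_key, loads_public_key, loads_private_key, dumps_*, want_bytes) that are not shown in this entry. The standalone sketch below reproduces only the OAEP round-trip it performs, calling the cryptography package directly; the 2048-bit key size and the message are illustrative choices, not values from the original module:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

# Same padding configuration as RSAKeyPair._crypt_padding above.
oaep = padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA1()),
                    algorithm=hashes.SHA1(), label=None)

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
ciphertext = private_key.public_key().encrypt(b"secret", oaep)
print(private_key.decrypt(ciphertext, oaep))  # b'secret'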
class FloatEntry(ValidatedEntry): <NEW_LINE> <INDENT> VALIDATOR = FloatValidator
Validated floating-point numbers.
625990718e7ae83300eea95a
class OperatingSystemTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> WINDOWS = "Windows" <NEW_LINE> LINUX = "Linux"
The operating system type required by the containers in the container group.
62599071ec188e330fdfa16d
class C1700(Router): <NEW_LINE> <INDENT> def __init__(self, module, server, project, chassis="1720"): <NEW_LINE> <INDENT> super().__init__(module, server, project, platform="c1700") <NEW_LINE> c1700_settings = {"ram": 128, "nvram": 32, "disk0": 0, "disk1": 0, "chassis": "1720", "iomem": 15, "clock_divisor": 8, "slot0": "C1700-MB-1FE"} <NEW_LINE> if chassis in ['1751', '1760']: <NEW_LINE> <INDENT> c1700_settings["slot1"] = "C1700-MB-WIC1" <NEW_LINE> <DEDENT> self._settings.update(c1700_settings) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Router c1700" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def symbolName(): <NEW_LINE> <INDENT> return "Router c1700"
Dynamips c1700 router. :param module: parent module for this node :param server: GNS3 server instance :param project: Project instance
625990712c8b7c6e89bd50b1
class GridPlot(object): <NEW_LINE> <INDENT> def __init__(self, sel, x=['comp(a)', 'formation_energy'], y=['comp(a)', 'formation_energy'], type=None, axis=0, kwargs=None): <NEW_LINE> <INDENT> self.sel = sel <NEW_LINE> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.axis = axis <NEW_LINE> if type is None: <NEW_LINE> <INDENT> if axis == 1: <NEW_LINE> <INDENT> type = [Scatter]*len(self.x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> type = [Scatter]*len(self.y) <NEW_LINE> <DEDENT> <DEDENT> self.type = type <NEW_LINE> if kwargs is None: <NEW_LINE> <INDENT> kwargs = [dict()]*len(self.type) <NEW_LINE> <DEDENT> self.kwargs = kwargs <NEW_LINE> self._plot() <NEW_LINE> self.sel.selection_callbacks.append(self.update) <NEW_LINE> self.layout = bokeh.models.GridPlot(children=self.p_) <NEW_LINE> <DEDENT> @property <NEW_LINE> def p(self): <NEW_LINE> <INDENT> if self.p_ is None: <NEW_LINE> <INDENT> self._plot() <NEW_LINE> <DEDENT> return self.p_ <NEW_LINE> <DEDENT> def _plot(self): <NEW_LINE> <INDENT> self.casm_p_ = [] <NEW_LINE> self.p_ = [] <NEW_LINE> for i, _y in enumerate(self.y): <NEW_LINE> <INDENT> casm_row = [] <NEW_LINE> row = [] <NEW_LINE> for j, _x in enumerate(self.x): <NEW_LINE> <INDENT> if self.axis == 1: <NEW_LINE> <INDENT> _index = j <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _index = i <NEW_LINE> <DEDENT> _type = self.type[_index] <NEW_LINE> _kwargs = self.kwargs[_index] <NEW_LINE> if _type is Histogram: <NEW_LINE> <INDENT> casm_row.append(_type(self.sel, x=_x, **_kwargs)) <NEW_LINE> row.append(casm_row[-1].p) <NEW_LINE> <DEDENT> elif _type is Scatter: <NEW_LINE> <INDENT> casm_row.append(_type(self.sel, x=_x, y=_y, **_kwargs)) <NEW_LINE> row.append(casm_row[-1].p) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Unknown or unsupported plot type:", _type.name) <NEW_LINE> casm_row.append(None) <NEW_LINE> row.append(None) <NEW_LINE> <DEDENT> <DEDENT> self.casm_p_.append(casm_row) <NEW_LINE> self.p_.append(row) <NEW_LINE> <DEDENT> for r in range(len(self.y)): <NEW_LINE> <INDENT> for c in range(len(self.x)): <NEW_LINE> <INDENT> x_range = self.p_[0][c].x_range if (r != 0) else None <NEW_LINE> y_range = self.p_[r][0].y_range if (c != 0) else None <NEW_LINE> if x_range is not None: <NEW_LINE> <INDENT> self.p_[r][c].x_range = x_range <NEW_LINE> <DEDENT> if y_range is not None: <NEW_LINE> <INDENT> self.p_[r][c].y_range = y_range <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> for row in self.casm_p_: <NEW_LINE> <INDENT> for casm_p_ in row: <NEW_LINE> <INDENT> if hasattr(casm_p_, 'update'): <NEW_LINE> <INDENT> casm_p_.update()
Attributes: p: a bokeh Figure containing formation energies and the convex hull layout: a bokeh layout element holding p sel: a CASM Selection used to make the figure x: the value of the x-axis, 'comp(a)' by default
62599071167d2b6e312b81f4
class AttachDebuggerCommand(Command): <NEW_LINE> <INDENT> def run(self, context, args, kwargs, opargs): <NEW_LINE> <INDENT> import sys <NEW_LINE> sys.path.append(args[0]) <NEW_LINE> import pydevd <NEW_LINE> pydevd.settrace(args[1], port=args[2])
Usage: attach_debugger <path to pydevd egg> <host> <port>
6259907199fddb7c1ca63a38
class PadLayer(Layer): <NEW_LINE> <INDENT> def __init__( self, layer = None, paddings = None, mode = 'CONSTANT', name = 'pad_layer', ): <NEW_LINE> <INDENT> Layer.__init__(self, name=name) <NEW_LINE> assert paddings is not None, "paddings should be a Tensor of type int32. see https://www.tensorflow.org/api_docs/python/tf/pad" <NEW_LINE> self.inputs = layer.outputs <NEW_LINE> print((" [TL] PadLayer %s: paddings:%s mode:%s" % (self.name, list(paddings.get_shape()), mode))) <NEW_LINE> self.outputs = tf.pad(self.inputs, paddings=paddings, mode=mode, name=name) <NEW_LINE> self.all_layers = list(layer.all_layers) <NEW_LINE> self.all_params = list(layer.all_params) <NEW_LINE> self.all_drop = dict(layer.all_drop) <NEW_LINE> self.all_layers.extend( [self.outputs] )
The :class:`PadLayer` class is a Padding layer for any modes and dimensions. Please see `tf.pad <https://www.tensorflow.org/api_docs/python/tf/pad>`_ for usage. Parameters ---------- layer : a :class:`Layer` instance The `Layer` class feeding into this layer. padding : a Tensor of type int32. mode : one of "CONSTANT", "REFLECT", or "SYMMETRIC" (case-insensitive) name : a string or None An optional name to attach to this layer.
6259907176e4537e8c3f0e4a
class Portaudio(object): <NEW_LINE> <INDENT> def initialize(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_Initialize() <NEW_LINE> <DEDENT> def terminate(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_Terminate() <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_count(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_GetDeviceCount() <NEW_LINE> <DEDENT> def device_name(self, devindex: int) -> str: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return _portaudio.Pa_GetDeviceInfo(devindex).contents.name.decode() <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> return f'{err}' <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def default_output_device(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_GetDefaultOutputDevice() <NEW_LINE> <DEDENT> @property <NEW_LINE> def default_input_device(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_GetDefaultInputDevice() <NEW_LINE> <DEDENT> @property <NEW_LINE> def api_count(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_GetHostApiCount() <NEW_LINE> <DEDENT> @property <NEW_LINE> def default_api(self) -> int: <NEW_LINE> <INDENT> return _portaudio.Pa_GetDefaultHostApi() <NEW_LINE> <DEDENT> def __enter__(self) -> object: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.initialize() <NEW_LINE> return self <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> print(err) <NEW_LINE> return object <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc_type, exc_value, traceback) -> bool: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.terminate() <NEW_LINE> return not bool(exc_type) <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> print(err) <NEW_LINE> return False
Portaudio class. A portaudio object that can be used with the 'with' statement to initialize and terminate portaudio. Portaudio has to be initialized before any portaudio functions are called.
625990714e4d562566373cd2
class DoubleConv(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, mid_channels=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> if not mid_channels: <NEW_LINE> <INDENT> mid_channels = out_channels <NEW_LINE> <DEDENT> self.double_conv = nn.Sequential( nn.Conv3d(in_channels, mid_channels, kernel_size=3, padding=1), nn.BatchNorm3d(mid_channels), nn.ReLU(inplace=True), nn.Conv3d(mid_channels, out_channels, kernel_size=3, padding=1), nn.BatchNorm3d(out_channels), nn.ReLU(inplace=True) ) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return self.double_conv(x)
(convolution => [BN] => ReLU) * 2
625990713317a56b869bf1aa
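A quick shape check for the DoubleConv block above on a dummy 3D volume; the batch size, channel counts and 16x16x16 spatial size are arbitrary, and the class definition from the preceding entry is assumed to be in scope:

import torch

block = DoubleConv(in_channels=1, out_channels=8)
x = torch.randn(1, 1, 16, 16, 16)  # (batch, channels, depth, height, width)
print(block(x).shape)              # torch.Size([1, 8, 16, 16, 16])
# kernel_size=3 with padding=1 leaves the spatial dimensions unchanged.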
class TestCase3(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.__regex__ = yare.compile('a*(b|c\))d') <NEW_LINE> <DEDENT> def test_match(self): <NEW_LINE> <INDENT> positive = [ 'bd', 'abd', 'ac)d', 'c)d' ] <NEW_LINE> negative = [ '', ' ', '\\', '\\e', 'acd', 'ac\)d', 'c\)d', 'a*(b|c\))d' ] <NEW_LINE> for case in positive: <NEW_LINE> <INDENT> self.assertTrue(self.__regex__.match(case)) <NEW_LINE> <DEDENT> for case in negative: <NEW_LINE> <INDENT> self.assertFalse(self.__regex__.match(case)) <NEW_LINE> <DEDENT> <DEDENT> def test_match_prefix(self): <NEW_LINE> <INDENT> positive = [ 'bd', 'abd', 'ac)d', 'c)d' ] <NEW_LINE> negative = [ '', ' ', '\\', '\\e', 'acd', 'ac\)d', 'c\)d', 'a*(b|c\))d' ] <NEW_LINE> partial = [ ('bda', 2), ('c)dcd', 3), ('ac)dabcd', 4), ] <NEW_LINE> for case in positive: <NEW_LINE> <INDENT> self.assertEqual(self.__regex__.match_prefix(case), len(case)) <NEW_LINE> <DEDENT> for case in negative: <NEW_LINE> <INDENT> self.assertEqual(self.__regex__.match_prefix(case), 0) <NEW_LINE> <DEDENT> for case in partial: <NEW_LINE> <INDENT> self.assertEqual(self.__regex__.match_prefix(case[0]), case[1])
complex test case 3
62599071627d3e7fe0e08753
class LoadBalancerPaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[LoadBalancer]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(LoadBalancerPaged, self).__init__(*args, **kwargs)
A paging container for iterating over a list of :class:`LoadBalancer <azure.mgmt.network.v2017_10_01.models.LoadBalancer>` object
62599071b7558d5895464b99
class Task(object): <NEW_LINE> <INDENT> default_cmd = "" <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self._container = None <NEW_LINE> self._config = dict(kwargs) <NEW_LINE> cmd = self._config.get("cmd", self.default_cmd) <NEW_LINE> container_name = self._config.get("docker_container_name", None) <NEW_LINE> if container_name is not None: <NEW_LINE> <INDENT> client = docker.from_env() <NEW_LINE> for container in client.containers.list(): <NEW_LINE> <INDENT> if re.match(container_name, container.name): <NEW_LINE> <INDENT> self._container = container <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._cmd = which(cmd) <NEW_LINE> if not self._cmd: <NEW_LINE> <INDENT> raise ValueError("Can't find '{}' binary".format(cmd)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _exec(self, cmds, env=None): <NEW_LINE> <INDENT> if self._container is not None: <NEW_LINE> <INDENT> exit_code, output = self._container.exec_run(cmds, demux=True) <NEW_LINE> return subprocess.CompletedProcess( cmds, exit_code, stdout=output[0], stderr=output[1] ) <NEW_LINE> <DEDENT> return subprocess.run( cmds, env=env, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) <NEW_LINE> <DEDENT> def _hook(self, hook_name): <NEW_LINE> <INDENT> if hook_name in self._config.keys(): <NEW_LINE> <INDENT> hook_bin = which(self._config[hook_name]) <NEW_LINE> if not hook_bin: <NEW_LINE> <INDENT> raise ValueError( "Can't find '{}' binary for {} hook".format(hook_bin, hook_name) ) <NEW_LINE> <DEDENT> self._exec(hook_bin) <NEW_LINE> <DEDENT> <DEDENT> def _run(self): <NEW_LINE> <INDENT> self._exec(self._cmd) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._hook("pre_hook") <NEW_LINE> self._run() <NEW_LINE> self._hook("post_hook")
Parent Task class. If you create a Task, your class must be a child of this class.
625990718a43f66fc4bf3a60
class Token: <NEW_LINE> <INDENT> counter = 0 <NEW_LINE> def __init__(self, token): <NEW_LINE> <INDENT> self.symbol = token <NEW_LINE> self.index = Token.counter <NEW_LINE> Token.counter += 1
Describes a token which is a representation of an indexed string (words or symbols in the grammar). It is the format in which these words and symbols are stored on the stack and in the buffer.
62599071435de62698e9d6d2
class Instruction: <NEW_LINE> <INDENT> def __init__( self, name, values ): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.values = values <NEW_LINE> <DEDENT> def to_html( self, **kwArgs ): <NEW_LINE> <INDENT> return ""
A configuration item for the output
6259907138b623060ffaa4ba
@implementer(INetwork) <NEW_LINE> class HostNetwork(object): <NEW_LINE> <INDENT> logger = Logger() <NEW_LINE> def create_proxy_to(self, ip, port): <NEW_LINE> <INDENT> return create_proxy_to(self.logger, ip, port) <NEW_LINE> <DEDENT> def delete_proxy(self, proxy): <NEW_LINE> <INDENT> return delete_proxy(self.logger, proxy) <NEW_LINE> <DEDENT> def open_port(self, port): <NEW_LINE> <INDENT> return open_port(self.logger, port) <NEW_LINE> <DEDENT> def delete_open_port(self, port): <NEW_LINE> <INDENT> return delete_open_port(self.logger, port) <NEW_LINE> <DEDENT> enumerate_proxies = staticmethod(enumerate_proxies) <NEW_LINE> enumerate_open_ports = staticmethod(enumerate_open_ports) <NEW_LINE> def enumerate_used_ports(self): <NEW_LINE> <INDENT> listening = set( conn.laddr[1] for conn in net_connections(kind='tcp') ) <NEW_LINE> proxied = set( proxy.port for proxy in self.enumerate_proxies() ) <NEW_LINE> open_ports = set( open_port.port for open_port in self.enumerate_open_ports() ) <NEW_LINE> return frozenset(listening | proxied | open_ports)
An ``INetwork`` implementation based on ``iptables``.
62599071e5267d203ee6d023
class ThreadPool(object): <NEW_LINE> <INDENT> def __init__(self, size, name): <NEW_LINE> <INDENT> self.stopping = False <NEW_LINE> self.active_workers = 0 <NEW_LINE> self.name = name <NEW_LINE> self.unfinished_tasks = 0 <NEW_LINE> self.mutex = threading.Lock() <NEW_LINE> self.all_tasks_done = threading.Condition(self.mutex) <NEW_LINE> self._queue = Queue.Queue() <NEW_LINE> self._workers = [] <NEW_LINE> for i in range(size): <NEW_LINE> <INDENT> worker = ThreadPoolWorker(self) <NEW_LINE> worker.setName('%s-%d' % (name, i)) <NEW_LINE> self._workers.append(worker) <NEW_LINE> <DEDENT> <DEDENT> def task_add(self, func, args = [], kwargs = {}, callback = None): <NEW_LINE> <INDENT> self.mutex.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self.unfinished_tasks+=1 <NEW_LINE> self._queue.put((func, args, kwargs, callback)) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.mutex.release() <NEW_LINE> <DEDENT> <DEDENT> add_task = task_add <NEW_LINE> def task_done(self): <NEW_LINE> <INDENT> self.all_tasks_done.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self.unfinished_tasks-=1 <NEW_LINE> if self.unfinished_tasks == 0: <NEW_LINE> <INDENT> self.all_tasks_done.notify_all() <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self.all_tasks_done.release() <NEW_LINE> <DEDENT> <DEDENT> def worker_done(self): <NEW_LINE> <INDENT> self.mutex.acquire() <NEW_LINE> self.active_workers-=1 <NEW_LINE> self.mutex.release() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> for w in self._workers: <NEW_LINE> <INDENT> w.start() <NEW_LINE> self.mutex.acquire() <NEW_LINE> self.active_workers+=1 <NEW_LINE> self.mutex.release() <NEW_LINE> <DEDENT> <DEDENT> def join(self): <NEW_LINE> <INDENT> self.all_tasks_done.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> while self.unfinished_tasks: <NEW_LINE> <INDENT> self.all_tasks_done.wait(0.1) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self.all_tasks_done.release()
A thread pool object, the real workhorse of this module. Methods: __init__(size, name): Arguments: size - the number of threads in the pool name - a name to give to the pool task_add(func, args, kwargs, callback): Arguments: func - callable object to execute on the thread args - a list of the positional arguments for 'func' kwargs - dict of the kwargs for 'func' callback - an optional callback to run after func completes ** Note remember this runs on the same thread as 'func' so the thread will not pickup a new task until the callback completes also. The callback will be passed the return value of 'func'. task_done(): This should not be called directly and should only be called by the run() method of the Worker class. Because in python 2.3 Queue.Queue does not have a task_done() method I implemented it in the pool instead. worker_done(): This should not be called directly and should only be called by the run() method of the Worker class. If a thread is in a 'stopping' state it will not pickup another task from the queue and instead return. Before it returns it calls worker_done() to decrement pool.active_workers. If pool.active_workers == 0 then the pool is stopped. start(): Start the ThreadPool working on its queue. join(): Block until the ThreadPool has finished all of its work.
6259907132920d7e50bc7913
class ThreadSafeBus(ObjectProxy): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if import_exc is not None: <NEW_LINE> <INDENT> raise import_exc <NEW_LINE> <DEDENT> super(ThreadSafeBus, self).__init__(Bus(*args, **kwargs)) <NEW_LINE> self.__wrapped__._lock_send_periodic = nullcontext() <NEW_LINE> self._lock_send = RLock() <NEW_LINE> self._lock_recv = RLock() <NEW_LINE> <DEDENT> def recv(self, timeout=None, *args, **kwargs): <NEW_LINE> <INDENT> with self._lock_recv: <NEW_LINE> <INDENT> return self.__wrapped__.recv(timeout=timeout, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def send(self, msg, timeout=None, *args, **kwargs): <NEW_LINE> <INDENT> with self._lock_send: <NEW_LINE> <INDENT> return self.__wrapped__.send(msg, timeout=timeout, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def filters(self): <NEW_LINE> <INDENT> with self._lock_recv: <NEW_LINE> <INDENT> return self.__wrapped__.filters <NEW_LINE> <DEDENT> <DEDENT> @filters.setter <NEW_LINE> def filters(self, filters): <NEW_LINE> <INDENT> with self._lock_recv: <NEW_LINE> <INDENT> self.__wrapped__.filters = filters <NEW_LINE> <DEDENT> <DEDENT> def set_filters(self, can_filters=None, *args, **kwargs): <NEW_LINE> <INDENT> with self._lock_recv: <NEW_LINE> <INDENT> return self.__wrapped__.set_filters(can_filters=can_filters, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def flush_tx_buffer(self, *args, **kwargs): <NEW_LINE> <INDENT> with self._lock_send: <NEW_LINE> <INDENT> return self.__wrapped__.flush_tx_buffer(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def shutdown(self, *args, **kwargs): <NEW_LINE> <INDENT> with self._lock_send, self._lock_recv: <NEW_LINE> <INDENT> return self.__wrapped__.shutdown(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> with self._lock_send, self._lock_recv: <NEW_LINE> <INDENT> return self.__wrapped__.state <NEW_LINE> <DEDENT> <DEDENT> @state.setter <NEW_LINE> def state(self, new_state): <NEW_LINE> <INDENT> with self._lock_send, self._lock_recv: <NEW_LINE> <INDENT> self.__wrapped__.state = new_state
Contains a thread safe :class:`can.BusABC` implementation that wraps around an existing interface instance. All public methods of that base class are now safe to be called from multiple threads. The send and receive methods are synchronized separately. Use this as a drop-in replacement for :class:`~can.BusABC`. .. note:: This approach assumes that both :meth:`~can.BusABC.send` and :meth:`~can.BusABC._recv_internal` of the underlying bus instance can be called simultaneously, and that the methods use :meth:`~can.BusABC._recv_internal` instead of :meth:`~can.BusABC.recv` directly.
62599071cc0a2c111447c737
class RSAKeyValueType_(SamlBase): <NEW_LINE> <INDENT> c_tag = 'RSAKeyValueType' <NEW_LINE> c_namespace = NAMESPACE <NEW_LINE> c_children = SamlBase.c_children.copy() <NEW_LINE> c_attributes = SamlBase.c_attributes.copy() <NEW_LINE> c_child_order = SamlBase.c_child_order[:] <NEW_LINE> c_cardinality = SamlBase.c_cardinality.copy() <NEW_LINE> c_children['{http://www.w3.org/2000/09/xmldsig#}Modulus'] = ('modulus', Modulus) <NEW_LINE> c_children['{http://www.w3.org/2000/09/xmldsig#}Exponent'] = ('exponent', Exponent) <NEW_LINE> c_child_order.extend(['modulus', 'exponent']) <NEW_LINE> def __init__(self, modulus=None, exponent=None, text=None, extension_elements=None, extension_attributes=None, ): <NEW_LINE> <INDENT> SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes, ) <NEW_LINE> self.modulus=modulus <NEW_LINE> self.exponent=exponent
The http://www.w3.org/2000/09/xmldsig#:RSAKeyValueType element
62599071e1aae11d1e7cf473
class SerialClientMsgSender: <NEW_LINE> <INDENT> def __init__(self, pendant=None): <NEW_LINE> <INDENT> self.pendant = pendant <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def pendant_client_name(): <NEW_LINE> <INDENT> return "serial" <NEW_LINE> <DEDENT> def pendant_client_msg(self, msg): <NEW_LINE> <INDENT> self.pendant.serial_port.write(msg) <NEW_LINE> self.pendant.serial_port.flush()
Class used to pass client messages generated by the Pendant application to the serial device.
62599071ac7a0e7691f73db6
@swagger.model() <NEW_LINE> class ScenarioProject(models.ModelBase): <NEW_LINE> <INDENT> def __init__(self, project='', customs=None, scores=None, trust_indicators=None): <NEW_LINE> <INDENT> self.project = project <NEW_LINE> self.customs = list_default(customs) <NEW_LINE> self.scores = list_default(scores) <NEW_LINE> self.trust_indicators = list_default(trust_indicators) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def attr_parser(): <NEW_LINE> <INDENT> return {'scores': ScenarioScore, 'trust_indicators': ScenarioTI} <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return [self.project == other.project and self._customs_eq(other) and self._scores_eq(other) and self._ti_eq(other)] <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def _customs_eq(self, other): <NEW_LINE> <INDENT> return set(self.customs) == set(other.customs) <NEW_LINE> <DEDENT> def _scores_eq(self, other): <NEW_LINE> <INDENT> return set(self.scores) == set(other.scores) <NEW_LINE> <DEDENT> def _ti_eq(self, other): <NEW_LINE> <INDENT> return set(self.trust_indicators) == set(other.trust_indicators)
@property customs: @ptype customs: C{list} of L{string} @property scores: @ptype scores: C{list} of L{ScenarioScore} @property trust_indicators: @ptype trust_indicators: C{list} of L{ScenarioTI}
62599071442bda511e95d9be
class Url2Netloc(ConverterBase): <NEW_LINE> <INDENT> @ConverterBase.data_to_convert.setter <NEW_LINE> def data_to_convert(self, value: Any) -> None: <NEW_LINE> <INDENT> if not isinstance(value, str): <NEW_LINE> <INDENT> raise TypeError(f"<value> should be {str}, {type(value)} given.") <NEW_LINE> <DEDENT> if not value: <NEW_LINE> <INDENT> raise ValueError("<value> should not be empty.") <NEW_LINE> <DEDENT> super(Url2Netloc, self.__class__).data_to_convert.fset(self, value) <NEW_LINE> <DEDENT> def get_converted(self) -> str: <NEW_LINE> <INDENT> parsed_url = urllib.parse.urlparse(self.data_to_convert) <NEW_LINE> if not parsed_url.netloc and parsed_url.path: <NEW_LINE> <INDENT> netloc = parsed_url.path <NEW_LINE> <DEDENT> elif parsed_url.netloc: <NEW_LINE> <INDENT> netloc = parsed_url.netloc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> netloc = self.data_to_convert <NEW_LINE> <DEDENT> if "//" in netloc: <NEW_LINE> <INDENT> netloc = netloc[netloc.find("//") + 2 :] <NEW_LINE> <DEDENT> if "/" in netloc: <NEW_LINE> <INDENT> netloc = netloc[: netloc.find("/")] <NEW_LINE> <DEDENT> return netloc
Provides the interface for the conversion/extraction of the network location of a given URL.
62599071a8370b77170f1c97
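A hedged usage sketch for Url2Netloc; it assumes ConverterBase (not shown here) can be instantiated without arguments and simply stores whatever is assigned to data_to_convert, which is what the property setter above suggests:

converter = Url2Netloc()  # assumption: no constructor arguments required
converter.data_to_convert = "https://example.org/some/path?q=1"
print(converter.get_converted())  # example.org

converter.data_to_convert = "example.org/some/path"
print(converter.get_converted())  # example.org (no scheme, so the netloc is cut out of the path)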
class SMPacketServerNSCGSU(SMPacket): <NEW_LINE> <INDENT> command = smcommand.SMServerCommand.NSCGSU <NEW_LINE> _payload = [ (smencoder.SMPayloadType.INT, "section", 1), (smencoder.SMPayloadType.INT, "nb_players", 1), (smencoder.SMPayloadType.MAP, "options", ("section", { 0: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_players")), 1: (smencoder.SMPayloadType.INTLIST, None, (2, "nb_players")), 2: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_players")), })) ]
Server command 133 (Scoreboard update) This will update the client's scoreboard. :param int section: Which section to update (0: names, 1: combos, 2: grades) :param int nb_players: Nb of players in this packet :param list options: Int list containing names, combos or grades :Example: >>> from smserver.smutils.smpacket import smpacket >>> packet = smpacket.SMPacketServerNSCGSU( ... section=1, # Update the actual combo ... nb_players=2, # 2 users in this packet ... options=[12, 5] # List containing the combos ... ) >>> print(packet.binary) b'\x00\x00\x00\x07\x85\x01\x02\x00\x0c\x00\x05'
6259907101c39578d7f1439b
class ImageWatermarkInput(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImageContent = None <NEW_LINE> self.Width = None <NEW_LINE> self.Height = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ImageContent = params.get("ImageContent") <NEW_LINE> self.Width = params.get("Width") <NEW_LINE> self.Height = params.get("Height")
Input parameters of an image watermark template.
62599071aad79263cf430083
class Features(dict): <NEW_LINE> <INDENT> def __init__(self, template_dir, initial_content=None, name=None): <NEW_LINE> <INDENT> super(Features, self).__init__(initial_content or {}) <NEW_LINE> self.name = name <NEW_LINE> self.template_dir = template_dir <NEW_LINE> if 'path' not in self: <NEW_LINE> <INDENT> self['path'] = os.path.basename(template_dir) <NEW_LINE> <DEDENT> <DEDENT> def DependenciesForEnvironment(self, environment=None): <NEW_LINE> <INDENT> required = [] <NEW_LINE> optional = [] <NEW_LINE> excluded = [] <NEW_LINE> for r in self.get('requires', []): <NEW_LINE> <INDENT> environments = r['environments'] <NEW_LINE> if '*' in environments or environment in environments: <NEW_LINE> <INDENT> required.append(r) <NEW_LINE> <DEDENT> elif 'generic' in environments or not environment: <NEW_LINE> <INDENT> optional.append(r) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> excluded.append(r) <NEW_LINE> <DEDENT> <DEDENT> return required, optional, excluded <NEW_LINE> <DEDENT> def ExtractPathsFromDependencies(self, dependencies, file_type=None): <NEW_LINE> <INDENT> ret = set() <NEW_LINE> for d in dependencies or []: <NEW_LINE> <INDENT> for f in d.get('files') or []: <NEW_LINE> <INDENT> p = f.get('path') <NEW_LINE> if p and (file_type is None or file_type == f.get('type')): <NEW_LINE> <INDENT> ret.add(p) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ret <NEW_LINE> <DEDENT> def AllDependencyPaths(self): <NEW_LINE> <INDENT> ret = set() <NEW_LINE> for dependency in self.get('requires', []): <NEW_LINE> <INDENT> for f in dependency.get('files') or []: <NEW_LINE> <INDENT> p = f.get('path') <NEW_LINE> if p: <NEW_LINE> <INDENT> ret.add(p) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ret <NEW_LINE> <DEDENT> def FilePathsWeDoNotDependOn(self, environment=None, file_type=None): <NEW_LINE> <INDENT> if not environment and not file_type: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> req, _, _ = self.DependenciesForEnvironment(environment=environment) <NEW_LINE> req_paths = self.ExtractPathsFromDependencies(req, file_type=file_type) <NEW_LINE> all_paths = self.AllDependencyPaths() <NEW_LINE> return all_paths - req_paths
A dictionary describing the features of a particular API variation.
625990717047854f46340c84
class AsyncMsgError(NetException): <NEW_LINE> <INDENT> def __init__(self, msg=''): <NEW_LINE> <INDENT> super(self.__class__, self).__init__(msg)
Exception class for asynchronous message errors in cup.net.async.
62599071f548e778e596ce5a
class OCSPError(Exception): <NEW_LINE> <INDENT> pass
Base OCSP Error class
625990711f037a2d8b9e54d1
class Alg(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required=[] <NEW_LINE> self.b_key = "alg" <NEW_LINE> self.a10_url="/axapi/v3/cgnv6/lsn/alg" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.ftp = {} <NEW_LINE> self.sip = {} <NEW_LINE> self.esp = {} <NEW_LINE> self.pptp = {} <NEW_LINE> self.rtsp = {} <NEW_LINE> self.tftp = {} <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
Class Description:: Change LSN ALG Settings. Class alg supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/cgnv6/lsn/alg`.
625990714e4d562566373cd4
class MyFavCourseView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> course_list = [] <NEW_LINE> fav_courses = UserFavorite.objects.filter(user=request.user, fav_type=1) <NEW_LINE> for fav_course in fav_courses: <NEW_LINE> <INDENT> course_id = fav_course.fav_id <NEW_LINE> course = Course.objects.get(id=course_id) <NEW_LINE> course_list.append(course) <NEW_LINE> <DEDENT> return render(request, 'usercenter-fav-course.html', { "course_list" : course_list })
User center: the courses I have favorited.
625990717d847024c075dca6
class PollForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Poll <NEW_LINE> exclude = ['author', 'created', 'stream'] <NEW_LINE> widgets = { 'due_date': DateWidget(), 'categories': SelectMultipleAndAddWidget(add_url='/categories/add', with_perms=['taxonomy.add_category']), 'tags': SelectMultipleAndAddWidget(add_url='/tags/add', with_perms=['taxonomy.add_tag']) }
Form for poll data.
62599071097d151d1a2c2940
class TagError(Exception): <NEW_LINE> <INDENT> def __init__(self, error_msg=None, status_code=None): <NEW_LINE> <INDENT> super(TagError, self).__init__() <NEW_LINE> self.error_msg = error_msg or self.__class__.__name__ <NEW_LINE> self.status_code = status_code or 400 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr("Error message: %s , Error code: %s" % (self.error_msg, self.status_code))
Generic error for Tags.
62599071283ffb24f3cf5176
class Movie(): <NEW_LINE> <INDENT> def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube): <NEW_LINE> <INDENT> self.title = movie_title <NEW_LINE> self.storyline = movie_storyline <NEW_LINE> self.poster_image_url = poster_image <NEW_LINE> self.trailer_youtube_url = trailer_youtube <NEW_LINE> <DEDENT> def show_trailer(self): <NEW_LINE> <INDENT> webbrowser.open(self.trailer_youtube_url)
Represents a movie
6259907144b2445a339b75c5
class Weapon(Item): <NEW_LINE> <INDENT> def __init__(self, name, description, weight, attack, cost): <NEW_LINE> <INDENT> Item.__init__(self, name, description, weight) <NEW_LINE> self._attack = attack <NEW_LINE> self._cost = cost <NEW_LINE> <DEDENT> def getAttack(self): <NEW_LINE> <INDENT> return self._attack <NEW_LINE> <DEDENT> def getCost(self): <NEW_LINE> <INDENT> return self._cost <NEW_LINE> <DEDENT> def getType(self): <NEW_LINE> <INDENT> return ItemType.WEAPON
A class of weapons. Weapon inherits from Item and has the defining parameter, attack.
62599071cc0a2c111447c738
class SnliData(object): <NEW_LINE> <INDENT> def __init__(self, data_file, word2index, sentence_len_limit=-1): <NEW_LINE> <INDENT> self._labels = [] <NEW_LINE> self._premises = [] <NEW_LINE> self._premise_transitions = [] <NEW_LINE> self._hypotheses = [] <NEW_LINE> self._hypothesis_transitions = [] <NEW_LINE> with open(data_file, "rt") as f: <NEW_LINE> <INDENT> for i, line in enumerate(f): <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> items = line.split("\t") <NEW_LINE> if items[0] not in POSSIBLE_LABELS: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> premise_items = items[1].split(" ") <NEW_LINE> hypothesis_items = items[2].split(" ") <NEW_LINE> premise_words = get_non_parenthesis_words(premise_items) <NEW_LINE> hypothesis_words = get_non_parenthesis_words(hypothesis_items) <NEW_LINE> if (sentence_len_limit > 0 and (len(premise_words) > sentence_len_limit or len(hypothesis_words) > sentence_len_limit)): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> premise_ids = [ word2index.get(word, UNK_CODE) for word in premise_words] <NEW_LINE> hypothesis_ids = [ word2index.get(word, UNK_CODE) for word in hypothesis_words] <NEW_LINE> self._premises.append(premise_ids) <NEW_LINE> self._hypotheses.append(hypothesis_ids) <NEW_LINE> self._premise_transitions.append(get_shift_reduce(premise_items)) <NEW_LINE> self._hypothesis_transitions.append(get_shift_reduce(hypothesis_items)) <NEW_LINE> assert (len(self._premise_transitions[-1]) == 2 * len(premise_words) - 1) <NEW_LINE> assert (len(self._hypothesis_transitions[-1]) == 2 * len(hypothesis_words) - 1) <NEW_LINE> self._labels.append(POSSIBLE_LABELS.index(items[0]) + 1) <NEW_LINE> <DEDENT> <DEDENT> assert len(self._labels) == len(self._premises) <NEW_LINE> assert len(self._labels) == len(self._hypotheses) <NEW_LINE> assert len(self._labels) == len(self._premise_transitions) <NEW_LINE> assert len(self._labels) == len(self._hypothesis_transitions) <NEW_LINE> <DEDENT> def num_batches(self, batch_size): <NEW_LINE> <INDENT> return int(math.ceil(len(self._labels) / batch_size)) <NEW_LINE> <DEDENT> def get_generator(self, batch_size): <NEW_LINE> <INDENT> zipped = list(zip( self._labels, self._premises, self._premise_transitions, self._hypotheses, self._hypothesis_transitions)) <NEW_LINE> random.shuffle(zipped) <NEW_LINE> (labels, premises, premise_transitions, hypotheses, hypothesis_transitions) = zip( *sorted(zipped, key=lambda x: max(len(x[1]), len(x[3])))) <NEW_LINE> def _generator(): <NEW_LINE> <INDENT> begin = 0 <NEW_LINE> while begin < len(labels): <NEW_LINE> <INDENT> end = min(begin + batch_size, len(labels)) <NEW_LINE> yield (labels[begin:end], pad_and_reverse_word_ids(premises[begin:end]).T, pad_transitions(premise_transitions[begin:end]).T, pad_and_reverse_word_ids(hypotheses[begin:end]).T, pad_transitions(hypothesis_transitions[begin:end]).T) <NEW_LINE> begin = end <NEW_LINE> <DEDENT> <DEDENT> return _generator
A split of SNLI data.
6259907160cbc95b063659d5
class Connection(models.Model): <NEW_LINE> <INDENT> from_friend = models.ForeignKey(User, related_name = 'friend_set') <NEW_LINE> to_friend = models.ForeignKey(User, related_name = 'friends')
Note: Create two of these objects for each connection
6259907199fddb7c1ca63a3a