code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class IFileInfo(Interface): <NEW_LINE> <INDENT> name = Attribute('The name of the file in its container') <NEW_LINE> title = Attribute('The title of the file or folder') <NEW_LINE> modified = Attribute('A string representing the modification time/date ' 'of the file or folder') <NEW_LINE> url = Attribute('A url for the file or folder') <NEW_LINE> mimeinfo = Attribute('Mime information for the file or folder ' '(instance of karl.utilities.interfaces.IMimeInfo)') <NEW_LINE> size = Attribute('File size with units such as MB')
An interface representing file info for display in views
6259905ad486a94d0ba2d59a
@base.vectorize <NEW_LINE> class rpeeks(base.StackInstruction): <NEW_LINE> <INDENT> code = base.opcodes['RPEEKS'] <NEW_LINE> arg_format = ['sw', 'r']
RPEEKS i j Peeks at position pointed to by register stack_pointer - r_j from the thread-local sint stack and assigns to sint register s_i. This instruction is vectorizable
6259905a498bea3a75a590e5
class VGGExtractor(VGGBase): <NEW_LINE> <INDENT> def __init__(self, layers, filters, extras, batch_norm=False, **kwargs): <NEW_LINE> <INDENT> super(VGGExtractor, self).__init__(layers, filters, batch_norm, **kwargs) <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.extras_feature = nn.HybridSequential() <NEW_LINE> for i, config in enumerate(extras['conv']): <NEW_LINE> <INDENT> ex = nn.HybridSequential(prefix='extra%d_' % (i)) <NEW_LINE> with ex.name_scope(): <NEW_LINE> <INDENT> for f, k, s, p in config: <NEW_LINE> <INDENT> ex.add(nn.Conv2D(f, k, s, p, **self.init)) <NEW_LINE> if batch_norm: <NEW_LINE> <INDENT> ex.add(nn.BatchNorm()) <NEW_LINE> ex.add(nn.Activation('relu')) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.extras_feature.add(ex) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> assert len(self.features) == 6 <NEW_LINE> outputs = [] <NEW_LINE> for layer in self.features[:2]: <NEW_LINE> <INDENT> x = layer(x) <NEW_LINE> x = F.Pooling(x, pool_type='max', kernel=(2, 2), stride=(2, 2), pooling_convention='full') <NEW_LINE> <DEDENT> for layer in self.features[2:5]: <NEW_LINE> <INDENT> x = layer(x) <NEW_LINE> outputs.append(x) <NEW_LINE> x = F.Pooling(x, pool_type='max', kernel=(2, 2), stride=(2, 2), pooling_convention='full') <NEW_LINE> <DEDENT> x = self.features[5](x) <NEW_LINE> outputs.append(x) <NEW_LINE> for layer in self.extras_feature: <NEW_LINE> <INDENT> x = layer(x) <NEW_LINE> outputs.append(x) <NEW_LINE> <DEDENT> return outputs
VGG multi layer feature extractor which produces multiple output feature maps Parameters ------------ layers : list of int Number of layer for vgg base network. filters : list of int Number of convolution filters for each layer. extras : dict of str to list Extra layers configurations batch_norm : bool if `True`,will use BatchNorm layers
6259905abe8e80087fbc0656
class Attribute(str): <NEW_LINE> <INDENT> struct = struct.Struct('>2H') <NEW_LINE> def __new__(cls, data, *args, **kwargs): <NEW_LINE> <INDENT> return str.__new__(cls, data) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def decode(cls, data, offset, length): <NEW_LINE> <INDENT> return cls(buffer(data, offset, length)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def encode(cls, msg, data): <NEW_LINE> <INDENT> return cls(data) <NEW_LINE> <DEDENT> @property <NEW_LINE> def padding(self): <NEW_LINE> <INDENT> return (4 - (len(self) % 4)) % 4 <NEW_LINE> <DEDENT> @property <NEW_LINE> def required(self): <NEW_LINE> <INDENT> return self.type < 0x8000
STUN message attribute structure :see: http://tools.ietf.org/html/rfc5389#section-15
6259905a23849d37ff852698
class DownloadLimitReached(OpenSubtitlesError, DownloadLimitExceeded): <NEW_LINE> <INDENT> pass
Exception raised when status is '407 Download limit reached'.
6259905aa219f33f346c7dd8
class DefaultThemePlugin(DefaultThemeUniteOptions, UnitePlugin): <NEW_LINE> <INDENT> saved_conf = models.ForeignKey( DefaultThemeSavedUniteOptions, related_name='instances', related_query_name='instance', blank=True, null=True, verbose_name=_('saved configuration'), help_text=_('Override the unite options with the values from the ' 'selected saved configuration.') ) <NEW_LINE> thumbnail_thumbnail_option = models.ForeignKey( 'filer.ThumbnailOption', null=False, blank=False, verbose_name=_('thumbnail thumbnail option'), related_name='defaulttheme_unite_plugins_thumbnail', related_query_name='defaulttheme_unite_plugin_thumbnail', ) <NEW_LINE> full_thumbnail_option = models.ForeignKey( 'filer.ThumbnailOption', null=False, blank=False, verbose_name=_('fullscreen thumbnail option'), related_name='defaulttheme_unite_plugins_full', related_query_name='defaulttheme_unite_plugin_full', ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Default theme plugin') <NEW_LINE> verbose_name_plural = _('Default theme plugins')
Default theme CMS plugin
6259905a21bff66bcd724238
class Layout: <NEW_LINE> <INDENT> def __init__(self, elem_count, col_count, spacing): <NEW_LINE> <INDENT> self.col_count = col_count <NEW_LINE> self.row_count = math.ceil(elem_count / col_count) <NEW_LINE> self.col_widths = [0] * col_count <NEW_LINE> self.full_width = spacing * (col_count - 1) <NEW_LINE> self.spacing = spacing <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Layout({})".format(", ".join(str(w) for w in self.col_widths)) <NEW_LINE> <DEDENT> def adjust_width(self, elt_idx, elt_width): <NEW_LINE> <INDENT> elt_column = int(elt_idx / self.row_count) <NEW_LINE> width_increase = elt_width - self.col_widths[elt_column] <NEW_LINE> if width_increase > 0: <NEW_LINE> <INDENT> self.col_widths[elt_column] = elt_width <NEW_LINE> self.full_width += width_increase <NEW_LINE> <DEDENT> <DEDENT> def format(self, words): <NEW_LINE> <INDENT> spacer = " " * self.spacing <NEW_LINE> word_count = len(words) <NEW_LINE> for row_idx in range(self.row_count): <NEW_LINE> <INDENT> word_idxs = (self.row_count * col_idx + row_idx for col_idx in range(self.col_count)) <NEW_LINE> row_words = (words[i] for i in word_idxs if i < word_count) <NEW_LINE> yield spacer.join(word.ljust(width) for (word, width) in zip(row_words, self.col_widths))
A layout defines how to represent a list of words. :attr col_count: the number of columns :attr row_count: the number of rows :attr col_widths: the width for each column :attr full_width: the full width of the layout :attr spacing: the spacing between each column
6259905a99cbb53fe68324b3
class Component(object): <NEW_LINE> <INDENT> ...
A component that can be drawn in CAD.
6259905a63b5f9789fe86746
class BulkSerializerMixin(object): <NEW_LINE> <INDENT> def to_internal_value(self, data): <NEW_LINE> <INDENT> ret = super(BulkSerializerMixin, self).to_internal_value(data) <NEW_LINE> view = self.context.get('view') <NEW_LINE> id_attr = view.lookup_url_kwarg or view.lookup_field <NEW_LINE> request_method = getattr(view.request, 'method', '') <NEW_LINE> if all((isinstance(self.root, BulkListSerializer), id_attr, request_method in ('PUT', 'PATCH'))): <NEW_LINE> <INDENT> id_field = self.fields[id_attr] <NEW_LINE> id_value = id_field.get_value(data) <NEW_LINE> ret[id_attr] = id_value <NEW_LINE> <DEDENT> return ret
序列器支持批量操作的混合类
6259905a76e4537e8c3f0b60
class Contact(models.Model): <NEW_LINE> <INDENT> email = models.CharField(max_length=255, null=True) <NEW_LINE> subject = models.CharField(max_length=255, null=True, blank=True) <NEW_LINE> message = models.TextField(null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.email) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = 'contacts'
Contact information
6259905a0c0af96317c57849
class NaverDict: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.url = 'http://endic.naver.com/search.nhn?sLn=en&isOnlyViewEE=N&query=[word]' <NEW_LINE> <DEDENT> def get_def(self, org_word): <NEW_LINE> <INDENT> kor_word = None <NEW_LINE> eng_defs = "" <NEW_LINE> eng_def = "" <NEW_LINE> hanja = None <NEW_LINE> word = urllib.parse.quote(org_word) <NEW_LINE> url = self.url.replace('[word]', word) <NEW_LINE> html = urllib.request.urlopen(url) <NEW_LINE> soup = BeautifulSoup(html, 'html.parser') <NEW_LINE> kor_word = soup.find(True, {'class':['first']}) <NEW_LINE> try: <NEW_LINE> <INDENT> kor_word = kor_word.span.a.string <NEW_LINE> eng_defs = soup.find(True, {'class':['list_e2']}).dd.div.p.span <NEW_LINE> for string in eng_defs.stripped_strings: <NEW_LINE> <INDENT> eng_def += str(string) + " " <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if kor_word is None: <NEW_LINE> <INDENT> kor_word = org_word <NEW_LINE> <DEDENT> return [(org_word, kor_word, eng_def)]
This class looks up korean vocabulary words using the Naver korean-english dictionary.
6259905acb5e8a47e493cc70
class WithdrawBill(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.WithdrawOrderId = None <NEW_LINE> self.Date = None <NEW_LINE> self.PayAmt = None <NEW_LINE> self.InSubAppId = None <NEW_LINE> self.OutSubAppId = None <NEW_LINE> self.CurrencyType = None <NEW_LINE> self.MetaData = None <NEW_LINE> self.ExtendFieldData = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.WithdrawOrderId = params.get("WithdrawOrderId") <NEW_LINE> self.Date = params.get("Date") <NEW_LINE> self.PayAmt = params.get("PayAmt") <NEW_LINE> self.InSubAppId = params.get("InSubAppId") <NEW_LINE> self.OutSubAppId = params.get("OutSubAppId") <NEW_LINE> self.CurrencyType = params.get("CurrencyType") <NEW_LINE> self.MetaData = params.get("MetaData") <NEW_LINE> self.ExtendFieldData = params.get("ExtendFieldData") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
聚鑫提现订单内容
6259905a462c4b4f79dbcfd9
class MedicalAppointmentStage(models.Model): <NEW_LINE> <INDENT> _name = "medical.appointment.stage" <NEW_LINE> _description = "Stage of Appointment" <NEW_LINE> _rec_name = 'name' <NEW_LINE> _order = "sequence" <NEW_LINE> name = fields.Char( 'Stage Name', size=64, required=True, translate=True, ) <NEW_LINE> sequence = fields.Integer( 'Sequence', help='Used to order stages. Lower is better.', ) <NEW_LINE> requirements = fields.Text( 'Requirements', ) <NEW_LINE> fold = fields.Boolean( 'Folded in KanBan view', help='This stage is folded in the KanBan view when there are no ' 'records in that stage to display.', ) <NEW_LINE> is_default = fields.Boolean( 'Default?', help='If checked, this stage will be selected when creating new ' 'appointments.', ) <NEW_LINE> _defaults = {'sequence': 1, 'fold': False, }
Model for case stages. This models the main stages of an appointment management flow. Main CRM objects (leads, opportunities, project issues, ...) will now use only stages, instead of state and stages. Stages are for example used to display the kanban view of records.
6259905aa8ecb033258727eb
class Record(): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.__dict__ = kwargs <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, buf): <NEW_LINE> <INDENT> kwargs = {} <NEW_LINE> for row in buf.split('\n'): <NEW_LINE> <INDENT> if not row: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> column, value = _record_row_parser(row) <NEW_LINE> value = _record_value_parser(value) <NEW_LINE> kwargs[column] = value <NEW_LINE> <DEDENT> return cls(**kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> def _sort(items): <NEW_LINE> <INDENT> return sorted(items, key=lambda x: x[0]) <NEW_LINE> <DEDENT> return ('%s(' % self.__class__.__name__ + ', '.join(['%s=%s' % (k, repr(v)) for k, v in _sort(self.__dict__.items())]) + ')') <NEW_LINE> <DEDENT> __str__ = __repr__
Record object of OVSDB table. Attributes are corresponding to columns of parsed tables.
6259905a8a43f66fc4bf3762
class Commentable(models.Model): <NEW_LINE> <INDENT> n_comments = models.IntegerField( verbose_name=_('number of comments'), blank=True, default=0, editable=False, db_index=True ) <NEW_LINE> commenting = EnumIntegerField(Commenting, verbose_name=_('commenting'), default=Commenting.NONE) <NEW_LINE> commenting_map_tools = EnumIntegerField(CommentingMapTools, verbose_name=_('commenting_map_tools'), default=CommentingMapTools.NONE) <NEW_LINE> voting = EnumIntegerField(Commenting, verbose_name=_('voting'), default=Commenting.REGISTERED) <NEW_LINE> def recache_n_comments(self): <NEW_LINE> <INDENT> new_n_comments = self.comments.count() <NEW_LINE> if new_n_comments != self.n_comments: <NEW_LINE> <INDENT> self.n_comments = new_n_comments <NEW_LINE> self.save(update_fields=("n_comments",)) <NEW_LINE> <DEDENT> if hasattr(self, 'hearing'): <NEW_LINE> <INDENT> self.hearing.recache_n_comments() <NEW_LINE> <DEDENT> <DEDENT> def check_commenting(self, request): <NEW_LINE> <INDENT> is_authenticated = request.user.is_authenticated <NEW_LINE> if self.commenting == Commenting.NONE: <NEW_LINE> <INDENT> raise ValidationError(_("%s does not allow commenting") % self, code="commenting_none") <NEW_LINE> <DEDENT> elif self.commenting == Commenting.REGISTERED: <NEW_LINE> <INDENT> if not is_authenticated: <NEW_LINE> <INDENT> raise ValidationError(_("%s does not allow anonymous commenting") % self, code="commenting_registered") <NEW_LINE> <DEDENT> <DEDENT> elif self.commenting == Commenting.STRONG: <NEW_LINE> <INDENT> if not is_authenticated: <NEW_LINE> <INDENT> raise ValidationError(_("%s requires strong authentication for commenting") % self, code="commenting_registered_strong") <NEW_LINE> <DEDENT> elif not request.user.has_strong_auth and not request.user.get_default_organization(): <NEW_LINE> <INDENT> raise ValidationError(_("%s requires strong authentication for commenting") % self, code="commenting_registered_strong") <NEW_LINE> <DEDENT> <DEDENT> elif self.commenting == Commenting.OPEN: 
<NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("Not implemented") <NEW_LINE> <DEDENT> <DEDENT> def check_voting(self, request): <NEW_LINE> <INDENT> is_authenticated = request.user.is_authenticated <NEW_LINE> if self.voting == Commenting.NONE: <NEW_LINE> <INDENT> raise ValidationError(_("%s does not allow voting") % self, code="voting_none") <NEW_LINE> <DEDENT> elif self.voting == Commenting.REGISTERED: <NEW_LINE> <INDENT> if not is_authenticated: <NEW_LINE> <INDENT> raise ValidationError(_("%s does not allow anonymous voting") % self, code="voting_registered") <NEW_LINE> <DEDENT> <DEDENT> elif self.voting == Commenting.STRONG: <NEW_LINE> <INDENT> if not is_authenticated: <NEW_LINE> <INDENT> raise ValidationError(_("%s requires strong authentication for voting") % self, code="voting_registered_strong") <NEW_LINE> <DEDENT> elif not request.user.has_strong_auth and not request.user.get_default_organization(): <NEW_LINE> <INDENT> raise ValidationError(_("%s requires strong authentication for voting") % self, code="voting_registered_strong") <NEW_LINE> <DEDENT> <DEDENT> elif self.voting == Commenting.OPEN: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("Not implemented") <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> abstract = True
Mixin for models which can be commented.
6259905a3539df3088ecd870
@register_resource <NEW_LINE> class v1_NamedRole(Resource): <NEW_LINE> <INDENT> __kind__ = 'v1.NamedRole' <NEW_LINE> __fields__ = { 'name': 'name', 'role': 'role', } <NEW_LINE> __types__ = { 'role': 'v1.Role', } <NEW_LINE> __required__ = set([ 'name', 'role', ]) <NEW_LINE> name = None <NEW_LINE> role = None <NEW_LINE> def __init__(self, *, name, role, **_kwargs_): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.role = role <NEW_LINE> super().__init__(**_kwargs_)
NamedRole relates a Role with a name
6259905a3617ad0b5ee0771f
class DiscreteGenerational(Network): <NEW_LINE> <INDENT> __mapper_args__ = {"polymorphic_identity": "discrete-generational"} <NEW_LINE> def __init__(self, generations, generation_size, initial_source): <NEW_LINE> <INDENT> self.property1 = repr(generations) <NEW_LINE> self.property2 = repr(generation_size) <NEW_LINE> self.property3 = repr(initial_source) <NEW_LINE> if self.initial_source: <NEW_LINE> <INDENT> self.max_size = repr(generations * generation_size + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.max_size = repr(generations * generation_size) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def generations(self): <NEW_LINE> <INDENT> return int(self.property1) <NEW_LINE> <DEDENT> @property <NEW_LINE> def generation_size(self): <NEW_LINE> <INDENT> return int(self.property2) <NEW_LINE> <DEDENT> @property <NEW_LINE> def initial_source(self): <NEW_LINE> <INDENT> return bool(self.property3) <NEW_LINE> <DEDENT> def add_node(self, node): <NEW_LINE> <INDENT> nodes = [n for n in self.nodes() if not isinstance(n, Source)] <NEW_LINE> num_agents = len(nodes) <NEW_LINE> curr_generation = int((num_agents - 1) / float(self.generation_size)) <NEW_LINE> node.generation = curr_generation <NEW_LINE> if curr_generation == 0: <NEW_LINE> <INDENT> if self.initial_source: <NEW_LINE> <INDENT> source = min( self.nodes(type=Source), key=attrgetter('creation_time')) <NEW_LINE> source.connect(whom=node) <NEW_LINE> source.transmit(to_whom=node) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> prev_agents = type(node).query .filter_by(failed=False, network_id=self.id, generation=(curr_generation - 1)) .all() <NEW_LINE> prev_fits = [p.fitness for p in prev_agents] <NEW_LINE> prev_probs = [(f / (1.0 * sum(prev_fits))) for f in prev_fits] <NEW_LINE> rnd = random.random() <NEW_LINE> temp = 0.0 <NEW_LINE> for i, probability in enumerate(prev_probs): <NEW_LINE> <INDENT> temp += probability <NEW_LINE> if temp > rnd: <NEW_LINE> <INDENT> parent = prev_agents[i] <NEW_LINE> break 
<NEW_LINE> <DEDENT> <DEDENT> parent.connect(whom=node) <NEW_LINE> parent.transmit(to_whom=node)
A discrete generational network. A discrete generational network arranges agents into none-overlapping generations. Each agent is connected to all agents in the previous generation. If initial_source is true agents in the first generation will connect to the oldest source in the network. generation_size dictates how many agents are in each generation, generations sets how many generations the network involves. Note that this network type assumes that agents have a property called generation. If you agents do not have this property it will not work.
6259905a4e4d5625663739dc
class ctrlport_probe_c(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def make(*args, **kwargs): <NEW_LINE> <INDENT> return _blocks_swig0.ctrlport_probe_c_make(*args, **kwargs) <NEW_LINE> <DEDENT> make = staticmethod(make) <NEW_LINE> def get(self): <NEW_LINE> <INDENT> return _blocks_swig0.ctrlport_probe_c_get(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _blocks_swig0.delete_ctrlport_probe_c <NEW_LINE> __del__ = lambda self : None;
A ControlPort probe to export vectors of signals. This block acts as a sink in the flowgraph but also exports vectors of complex samples over ControlPort. This block simply sends the current vector held in the work function when the queried by a ControlPort client. Constructor Specific Documentation: Make a ControlPort probe block. Args: id : A string ID to name the probe over ControlPort. desc : A string describing the probe.
6259905a097d151d1a2c2641
class Entry: <NEW_LINE> <INDENT> @property <NEW_LINE> def nodes(self): <NEW_LINE> <INDENT> return self.__nodes <NEW_LINE> <DEDENT> @property <NEW_LINE> def time(self): <NEW_LINE> <INDENT> return self.__time <NEW_LINE> <DEDENT> @property <NEW_LINE> def reason(self): <NEW_LINE> <INDENT> return self.__reason <NEW_LINE> <DEDENT> def __init__(self, entry): <NEW_LINE> <INDENT> fields = entry.split('\t') <NEW_LINE> if len(fields) == 3: <NEW_LINE> <INDENT> nodes, time, reason = entry.split('\t') <NEW_LINE> self.__nodes = Slurm.parseNodeNames(nodes) <NEW_LINE> self.__time = time.strip() <NEW_LINE> self.__reason = reason.strip() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__nodes = [] <NEW_LINE> self.__time = "" <NEW_LINE> self.__reason = ""
PRIVATE-PUBLIC : accessed through a Slurm.State object Describes a group of nodes which share a time and reason for being in a Slurm state
6259905a7cff6e4e811b7018
class PubSubAPIServicer(object): <NEW_LINE> <INDENT> def PubSub(self, request_iterator, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
PubSubAPI provides a libp2p pubsub API and is equivalent to go-ipfs `ipfs pubsub` subset of commands.
6259905a507cdc57c63a637a
class SearchContinuousSetsRunner(AbstractSearchRunner): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> super(SearchContinuousSetsRunner, self).__init__(args) <NEW_LINE> self._datasetId = args.datasetId <NEW_LINE> <DEDENT> def _run(self, datasetId): <NEW_LINE> <INDENT> iterator = self._client.search_continuous_sets(dataset_id=datasetId) <NEW_LINE> self._output(iterator) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if self._datasetId is None: <NEW_LINE> <INDENT> for dataset in self.getAllDatasets(): <NEW_LINE> <INDENT> self._run(dataset.id) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._run(self._datasetId)
Runner class for the continuoussets/search method.
6259905a2ae34c7f260ac6bc
class CachedContextlessDistributionSmoother(ContextlessDistributionSmoother): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._parse_result_smooth_count_map = { 0: 0.008502447849533839, 1: 0.21471040149642595, 2: 1.1129522477195453, 3: 1.8991528085245906, 4: 3.488358849588453, 5: 3.9048094131380293 } <NEW_LINE> self._word_smooth_count_map = { 0: 0.05562595701990601, 1: 0.22317104256946213, 2: 1.0726985250469303, 3: 1.9161010077475567, 4: 3.182159988876787, 5: 4.016279163368528 } <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def smooth_parse_result_occurrence_count(self, parse_result_occurrence_count): <NEW_LINE> <INDENT> return self._parse_result_smooth_count_map.get(parse_result_occurrence_count, parse_result_occurrence_count) <NEW_LINE> <DEDENT> def smooth_word_occurrence_count(self, word_occurrence_count): <NEW_LINE> <INDENT> return self._word_smooth_count_map.get(word_occurrence_count, word_occurrence_count)
Since SimpleGoodTuringContextlessDistributionSmoother takes a lot of time to initialize, this class returns the smooth counts which are calculated before. The values are calculated using test_simplegoodturingcontextlessdistributionsmoother.py. In a production app, these values should be cached in a db collection and updated incrementally over the time.
6259905a0fa83653e46f64bb
class TaxonomyAdminMenuPlugin(Plugin): <NEW_LINE> <INDENT> implements(IPluginBlock) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.name = 'AdminLeftPanelBottomPanePlugin' <NEW_LINE> self.plugin_guid = '63eadfa2-aa3b-4a0c-9232-b3ff568e5eb6' <NEW_LINE> <DEDENT> def return_string(self, tagname, *args): <NEW_LINE> <INDENT> return {'guid': self.plugin_guid, 'template': 'taxonomy/admin/admin_leftpanel_pane.html'}
adds a menu item to the admin screen
6259905a435de62698e9d3d9
class TranslateWizard(Wizard): <NEW_LINE> <INDENT> __name__ = 'translate.wizard' <NEW_LINE> start = StateView( 'translate.wizard.start', 'translate.view_translate_wizard_start', [ Button('Cancel', 'end', 'tryton-cancel'), Button('Translate', 'translate', 'tryton-ok', default=True), ]) <NEW_LINE> translate = StateTransition() <NEW_LINE> translation = StateView( 'translate.wizard.translation', 'translate.view_translate_wizard_translation', [ Button('Cancel', 'end', 'tryton-cancel'), Button('Apply', 'update', 'tryton-ok', default=True), ]) <NEW_LINE> update = StateTransition() <NEW_LINE> def transition_translate(self): <NEW_LINE> <INDENT> context = Transaction().context <NEW_LINE> context['source_lang'] = self.start.source_lang <NEW_LINE> context['target_lang'] = self.start.target_lang <NEW_LINE> context['translator'] = self.start.translator <NEW_LINE> return 'translation' <NEW_LINE> <DEDENT> def transition_update(self): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> context = Transaction().context <NEW_LINE> model = context.get('active_model', None) <NEW_LINE> translates = Translate.search([('model.model', '=', model)], limit=1) <NEW_LINE> active_id = context.get('active_id', None) <NEW_LINE> Model = pool.get(model) <NEW_LINE> model = Model(active_id) <NEW_LINE> if translates: <NEW_LINE> <INDENT> translate, = translates <NEW_LINE> with Transaction().set_context( language=self.start.target_lang): <NEW_LINE> <INDENT> data = {} <NEW_LINE> for f in translate.model_fields: <NEW_LINE> <INDENT> if getattr(self.translation, 'translate_%s' % f.name, False): <NEW_LINE> <INDENT> data[f.name] = getattr(self.translation, 'translation_%s' % f.name) <NEW_LINE> <DEDENT> <DEDENT> Model.write([model], data) <NEW_LINE> <DEDENT> <DEDENT> return 'end'
Translate Wizard
6259905a004d5f362081fad8
class VGG2L(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channel: int = 1): <NEW_LINE> <INDENT> super(VGG2L, self).__init__() <NEW_LINE> self.conv1_1 = torch.nn.Conv2d(in_channel, 64, 3, stride=1, padding=1) <NEW_LINE> self.conv1_2 = torch.nn.Conv2d(64, 64, 3, stride=1, padding=1) <NEW_LINE> self.conv2_1 = torch.nn.Conv2d(64, 128, 3, stride=1, padding=1) <NEW_LINE> self.conv2_2 = torch.nn.Conv2d(128, 128, 3, stride=1, padding=1) <NEW_LINE> self.in_channel = in_channel <NEW_LINE> <DEDENT> def forward(self, xs_pad: torch.Tensor, ilens: torch.Tensor, **kwargs): <NEW_LINE> <INDENT> logging.debug(self.__class__.__name__ + " input lengths: " + str(ilens)) <NEW_LINE> xs_pad = xs_pad.view( xs_pad.size(0), xs_pad.size(1), self.in_channel, xs_pad.size(2) // self.in_channel, ).transpose(1, 2) <NEW_LINE> xs_pad = F.relu(self.conv1_1(xs_pad)) <NEW_LINE> xs_pad = F.relu(self.conv1_2(xs_pad)) <NEW_LINE> xs_pad = F.max_pool2d(xs_pad, 2, stride=2, ceil_mode=True) <NEW_LINE> xs_pad = F.relu(self.conv2_1(xs_pad)) <NEW_LINE> xs_pad = F.relu(self.conv2_2(xs_pad)) <NEW_LINE> xs_pad = F.max_pool2d(xs_pad, 2, stride=2, ceil_mode=True) <NEW_LINE> if torch.is_tensor(ilens): <NEW_LINE> <INDENT> ilens = ilens.cpu().numpy() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ilens = np.array(ilens, dtype=np.float32) <NEW_LINE> <DEDENT> ilens = np.array(np.ceil(ilens / 2), dtype=np.int64) <NEW_LINE> ilens = np.array( np.ceil(np.array(ilens, dtype=np.float32) / 2), dtype=np.int64 ).tolist() <NEW_LINE> xs_pad = xs_pad.transpose(1, 2) <NEW_LINE> xs_pad = xs_pad.contiguous().view( xs_pad.size(0), xs_pad.size(1), xs_pad.size(2) * xs_pad.size(3) ) <NEW_LINE> return xs_pad, ilens, None
VGG-like module. Args: in_channel: number of input channels
6259905acc0a2c111447c5b9
class DiskGroupsDiskIdSchema(DefinitionsSchema): <NEW_LINE> <INDENT> title = "Diskgroups Disk Id Schema" <NEW_LINE> description = "ESXi host diskgroup schema containing disk ids" <NEW_LINE> diskgroups = ArrayItem( title="DiskGroups", description="List of disk groups in an ESXi host", min_items=1, items=DiskGroupDiskIdItem(), required=True, )
Schema of ESXi host diskgroups containing disk ids
6259905a8e7ae83300eea663
class NeuralNetwork: <NEW_LINE> <INDENT> def __init__(self, input_layer_nodes, hidden_layer_nodes, output_layer_nodes, learning_rate): <NEW_LINE> <INDENT> self.input_layer_nodes = input_layer_nodes <NEW_LINE> self.hidden_layer_nodes = hidden_layer_nodes <NEW_LINE> self.output_layer_nodes = output_layer_nodes <NEW_LINE> self.learning_rate = learning_rate <NEW_LINE> self.wih = np.random.normal( 0.0, pow(self.hidden_layer_nodes, -0.5), (self.hidden_layer_nodes, self.input_layer_nodes)) <NEW_LINE> self.who = np.random.normal( 0.0, pow(self.output_layer_nodes, -0.5), (self.output_layer_nodes, self.hidden_layer_nodes)) <NEW_LINE> self.activation_function = lambda x: scipy.special.expit(x) <NEW_LINE> <DEDENT> def train(self, input_data, output_data): <NEW_LINE> <INDENT> input_data = np.array(input_data, ndmin=2).T <NEW_LINE> output_data = np.array(output_data, ndmin=2).T <NEW_LINE> hidden_layer_input = np.dot(self.wih, input_data) <NEW_LINE> hidden_layer_output = self.activation_function(hidden_layer_input) <NEW_LINE> output_layer_input = np.dot(self.who, hidden_layer_output) <NEW_LINE> output_layer_output = self.activation_function(output_layer_input) <NEW_LINE> output_layer_error = output_data - output_layer_output <NEW_LINE> self.who += self.learning_rate * np.dot(output_layer_error * output_layer_output * ( 1 - output_layer_output), hidden_layer_output.T) <NEW_LINE> hidden_layer_error = np.dot(self.who.T, output_layer_error) <NEW_LINE> self.wih += self.learning_rate * np.dot(hidden_layer_error * hidden_layer_output * ( 1 - hidden_layer_output), input_data.T) <NEW_LINE> <DEDENT> def predict(self, input_data): <NEW_LINE> <INDENT> input_data = np.array(input_data, ndmin=2).T <NEW_LINE> hidden_layer_input = np.dot(self.wih, input_data) <NEW_LINE> hidden_layer_output = self.activation_function(hidden_layer_input) <NEW_LINE> output_layer_input = np.dot(self.who, hidden_layer_output) <NEW_LINE> output_layer_output = self.activation_function(output_layer_input) <NEW_LINE> 
return output_layer_output
神经网络
6259905a0c0af96317c5784a
class On(Builtin): <NEW_LINE> <INDENT> attributes = ('HoldAll',) <NEW_LINE> def apply(self, expr, evaluation): <NEW_LINE> <INDENT> seq = expr.get_sequence() <NEW_LINE> quiet_messages = set(evaluation.get_quiet_messages()) <NEW_LINE> if not seq: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for e in seq: <NEW_LINE> <INDENT> if isinstance(e, Symbol): <NEW_LINE> <INDENT> quiet_messages.discard(Expression('MessageName', e, String('trace'))) <NEW_LINE> <DEDENT> elif check_message(e): <NEW_LINE> <INDENT> quiet_messages.discard(e) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> evaluation.message('Message', 'name', e) <NEW_LINE> <DEDENT> evaluation.set_quiet_messages(quiet_messages) <NEW_LINE> <DEDENT> return Symbol('Null')
<dl> <dt>'On[$symbol$::$tag$]' <dd>turns a message on for printing. </dl> >> Off[Power::infy] >> 1 / 0 = ComplexInfinity >> On[Power::infy] >> 1 / 0 : Infinite expression 1 / 0 encountered. = ComplexInfinity
6259905a8a43f66fc4bf3764
class PartySeatingTest(unittest.TestCase):
    """Test suite for the party seating problem."""

    logger = logging.getLogger('PartySeatingTest')
    data = data
    party = party

    def known_test(self, known, A, B):
        """Assert that (A, B) is a valid seating for the acquaintance map."""
        self.assertEqual(
            len(A) + len(B),
            len(known),
            "wrong number of guests: "
            f"{len(known)} guests, "
            f"tables hold {len(A)} and {len(B)}"
        )
        for g in range(len(known)):
            self.assertTrue(
                g in A or g in B,
                f"Guest {g} not seated anywhere"
            )
        # No two guests at the same table may know each other.
        for a2 in A:
            for a1 in A:
                self.assertNotIn(
                    a2,
                    known[a1],
                    f"Guests {a1} and {a2} seated together, and know each other"
                )
        for b2 in B:
            for b1 in B:
                self.assertNotIn(
                    b2,
                    known[b1],
                    f"Guests {b1} and {b2} seated together, and know each other"
                )

    def test_sanity(self):
        """Smallest non-trivial instance: guest 0 knows guests 1 and 2."""
        known = [{1, 2}, {0}, {0}]
        _, A, B = PartySeatingTest.party(known)
        self.known_test(known, A, B)

    def test_party(self):
        """Run the solver on every stored instance."""
        for instance in PartySeatingTest.data:
            known = instance["known"]
            expected = instance["expected"]
            success, A, B = PartySeatingTest.party(known)
            if not expected:
                self.assertFalse(success)
                continue
            self.known_test(known, A, B)
Test suite for party seating problem
6259905add821e528d6da46b
class SingleObjectTemplateResponseMixin(
        JinjaTemplateResponseMixin,
        _generic_detail.SingleObjectTemplateResponseMixin):
    """Django's SingleObjectTemplateResponseMixin rendered through Jinja.

    Combines the Jinja template-response mixin with the stock generic
    detail mixin; adds no behaviour of its own.
    """
Equivalent of django mixin SingleObjectTemplateResponseMixin, but uses Jinja template renderer.
6259905a4e4d5625663739dd
class MeanStatisticGame(object):
    """A symmetric mean-statistic game: each player's payoff depends only on
    his own choice and the sum of the other players' choices.

    Abstract: subclasses must supply ``payoff(own, others)`` and ``title()``.
    """

    def __init__(self, N, choices):
        self.N = N
        self.choices = choices
        # All possible sums of the other N-1 players' choices.
        self.statistics = xrange((self.N - 1) * min(self.choices),
                                 (self.N - 1) * max(self.choices) + 1)

    def table(self):
        """Payoff table as a list of [own, others, payoff] rows."""
        return [[own, others, self.payoff(own, others)]
                for (own, others) in cartesian(self.choices, self.statistics)]

    def matrix(self):
        """Payoff matrix with a header row of statistics and a header column
        of own choices."""
        return [[""] + [other for other in self.statistics]] + \
               [[own] + [self.payoff(own, other) for other in self.statistics]
                for own in self.choices]

    def nfg(self):
        """Build the normal-form representation via libgambit."""
        game = libgambit.NewTable([len(self.choices) for i in xrange(self.N)])
        game.SetTitle(self.title())
        for (pl, player) in enumerate(game.Players()):
            for (st, strategy) in enumerate(player.Strategies()):
                strategy.SetLabel(str(self.choices[st]))
        for cont in game.contingencies():
            outcome = game.NewOutcome()
            cont.SetOutcome(outcome)
            choices = [int(cont.GetStrategy(pl + 1).GetLabel())
                       for pl in xrange(self.N)]
            for pl in xrange(self.N):
                outcome.SetPayoff(pl + 1,
                                  self.payoff(choices[pl],
                                              sum(choices) - choices[pl]))
        return game

    def is_tree(self):
        return False

    def efg_file(self):
        # BUG FIX: the original did ``raise NotImplemented`` -- raising the
        # NotImplemented *constant* is a TypeError; raise the exception class,
        # matching __setitem__ below.
        raise NotImplementedError

    def nfg_file(self):
        return self.nfg().nfg_file()

    def __getitem__(self, key):
        """game[own, others] -> payoff."""
        if len(key) != 2:
            raise KeyError
        return self.payoff(key[0], key[1])

    def __setitem__(self, key, value):
        raise NotImplementedError

    def mixed_strategy(self, point=None):
        return MSMixedProfile(self, point)
A general mean statistic game: a symmetric game in which the player's payoff depends on his choice and the sum of the choices of the other players. This class is abstract in that it depends upon, but does not define, the method payoff(own, others), which provides the payoff for any given vector of own choice and sum of others' choices.
6259905a4e4d5625663739de
class PingCtrl(Alive):
    """Tracks outstanding ARP pings for a host."""

    # Maximum number of unanswered pings before the host counts as failed.
    pingLim = 3

    def __init__(self):
        super(PingCtrl, self).__init__(timeoutSec['arpReply'])
        self.pending = 0

    def sent(self):
        """Record one more outstanding ping and refresh the liveness timer."""
        self.refresh()
        self.pending += 1

    def received(self):
        """A reply arrived: clear the outstanding-ping counter."""
        self.pending = 0

    def failed(self):
        """True once more than ``pingLim`` pings are outstanding."""
        return self.pending > PingCtrl.pingLim
Holds information for handling ARP pings for hosts
6259905a7047854f46340996
class IOType(enum.Enum):
    """Distinguishes input from output types (or neither)."""

    NONE = enum.auto()    # not an input or an output
    INPUT = enum.auto()   # consumes data
    OUTPUT = enum.auto()  # produces data
Enum to distinguish between input and output types
6259905a3539df3088ecd873
class SomeComponent:
    """Prototype-pattern example: customises copying via the ``__copy__`` and
    ``__deepcopy__`` hooks that ``copy.copy``/``copy.deepcopy`` call."""

    def __init__(self, some_int, some_list_of_objects, some_circular_ref):
        self.some_int = some_int
        self.some_list_of_objects = some_list_of_objects
        self.some_circular_ref = some_circular_ref

    def __copy__(self):
        """Shallow copy: one-level copies of the container attributes.

        BUG FIX: the original followed construction with
        ``new.__dict__.update(self.__dict__)``, which overwrote the freshly
        copied attributes with references to the originals, so the "copy"
        still shared state.
        """
        return self.__class__(
            self.some_int,
            copy.copy(self.some_list_of_objects),
            copy.copy(self.some_circular_ref),
        )

    def __deepcopy__(self, memo=None):
        """Deep copy; ``memo`` maps already-copied objects so shared/circular
        references are copied once.

        BUG FIX: ``memo`` was a mutable default argument (``{}``) shared
        between unrelated direct calls; default to None and create a fresh
        dict per call.  Also drops the ``__dict__.update`` clobbering (see
        ``__copy__``).
        """
        if memo is None:
            memo = {}
        return self.__class__(
            self.some_int,
            copy.deepcopy(self.some_list_of_objects, memo),
            copy.deepcopy(self.some_circular_ref, memo),
        )
Python provides its own interface of Prototype via `copy.copy` and `copy.deepcopy` functions. And any class that wants to implement custom implementations have to override `__copy__` and `__deepcopy__` member functions.
6259905a55399d3f05627af6
class EntityExtractor(ABC):
    """Abstract base for entity extractors.

    Implementations process text and pull out entities such as names,
    dates and places.
    """

    def __init__(self):
        pass

    @abstractmethod
    def extract_entities(self, text: str, max_retries=5):
        """Extract entities from ``text``; the base implementation yields an
        empty mapping."""
        return dict()
Abstract class for entity extractors. Responsible for processing text and extracting entities such as names, dates, places etc.
6259905a097d151d1a2c2643
class CheckNetworks(action.Action):
    """Pre-migration check for overlapping network resources.

    Checks subnet and segmentation-id overlaps, floating-IP overlaps, and
    VMs spawned directly in external networks; raises AbortMigrationError
    when any hard conflict is found.  Must run after 'act_get_filter'.
    """

    def run(self, **kwargs):
        LOG.debug("Checking networks...")
        conflicts = {}
        src_net = self.src_cloud.resources[utils.NETWORK_RESOURCE]
        dst_net = self.dst_cloud.resources[utils.NETWORK_RESOURCE]
        src_compute = self.src_cloud.resources[utils.COMPUTE_RESOURCE]
        search_opts = kwargs.get('search_opts_tenant', {})
        search_opts.update({'search_opts': kwargs.get('search_opts', {})})
        LOG.debug("Retrieving Network information from Source cloud...")
        ports = src_net.get_ports_list()
        src_info = NetworkInfo(src_net.read_info(**search_opts), ports)
        LOG.debug("Retrieving Network information from Destination cloud...")
        dst_info = NetworkInfo(dst_net.read_info())
        LOG.debug("Retrieving Compute information from Source cloud...")
        compute_info = ComputeInfo(src_compute.read_info(**search_opts))
        LOG.info("Check networks overlapping...")
        bad_subnets, bad_seg_ids = src_info.get_overlapped_networks(dst_info)
        if bad_subnets:
            conflicts['networks_with_overlapped_subnets'] = bad_subnets
        if bad_seg_ids:
            # Segmentation-id overlap is survivable: warn, do not abort.
            LOG.warning("Networks with segmentation IDs overlapping:\n%s",
                        bad_seg_ids)
        LOG.info("Check floating IPs overlapping...")
        overlapped_fips = src_info.list_overlapped_floating_ips(dst_info)
        if overlapped_fips:
            conflicts['overlapped_floating_ips'] = overlapped_fips
        LOG.info("Check VMs spawned directly in external networks...")
        ext_devices = src_info.get_devices_from_external_networks()
        external_vms = compute_info.list_vms_in_external_network(ext_devices)
        if external_vms:
            conflicts['vms_in_external_network'] = external_vms
        if conflicts:
            LOG.critical('Network overlapping list:\n%s', conflicts)
            raise exception.AbortMigrationError(
                "There is a number of overlapping Network resources, so "
                "migration process can not be continued. Resolve it please and"
                " try again.")
Check networks segmentation ID, subnets and floating IPs overlapping. Also check if VMs from SRC spawned in external networks directly. Returns list of all overlaps and prints it to the LOG. If this list is non-empty, raise exception (AbortMigrationError). It must be done before actual migration in the 'preparation' section. The action uses filtered search opts and must be run after 'act_get_filter' action.
6259905a23849d37ff85269c
class editMessageCaption(MessageUpdate):
    """Request object for Telegram's ``editMessageCaption`` method.

    Either ``chat_id`` + ``message_id`` or ``inline_message_id`` identifies
    the message whose caption is edited; all fields are stored verbatim for
    later serialization.
    """

    def __init__(self, chat_id=None, message_id=None, inline_message_id=None,
                 caption=None, parse_mode=None,
                 reply_markup: InlineKeyboardMarkup = None,
                 message: Message = None,
                 propagate_values: bool = False,
                 propagate_fields: dict = None):
        super().__init__(propagate_values=propagate_values,
                         propagate_fields=propagate_fields)
        # Keep every request field as-is on the instance.
        self.chat_id = chat_id
        self.message_id = message_id
        self.inline_message_id = inline_message_id
        self.caption = caption
        self.parse_mode = parse_mode
        self.reply_markup = reply_markup
        self.message = message
Use this method to edit captions of messages. On success, if edited message is sent by the bot, the edited Message is returned, otherwise True is returned. Parameters ---------- chat_id : Integer or String, optional Required if inline_message_id is not specified. Unique identifier for the target chat or username of the target channel (in the format @channelusername) message_id : Integer, optional Required if inline_message_id is not specified. Identifier of the message to edit inline_message_id : String, optional Required if chat_id and message_id are not specified. Identifier of the inline message caption : String, optional New caption of the message, 0-1024 characters after entities parsing parse_mode : String, optional Mode for parsing entities in the message caption. See formatting options for more details. reply_markup : InlineKeyboardMarkup, optional A JSON-serialized object for an inline keyboard.
6259905ad53ae8145f919a38
class EmailAuth:
    """Authentication backend matching users by exact email and password."""

    def authenticate(self, username=None, password=None):
        """Return the user whose email equals ``username`` and whose password
        checks out, else None."""
        try:
            candidate = User.objects.get(email=username)
            return candidate if candidate.check_password(password) else None
        except User.DoesNotExist:
            return None

    def get_user(self, user_id):
        """Return the active user with primary key ``user_id``, else None."""
        try:
            candidate = User.objects.get(pk=user_id)
            return candidate if candidate.is_active else None
        except User.DoesNotExist:
            return None
Authenticate a user by an exact match on the email and password
6259905a7cff6e4e811b701a
class Cluster:
    """Maps between flat node ids and 7-component addresses.

    ``extents`` gives the size of each of the 7 dimensions; ``space`` holds
    the cumulative products used as mixed-radix place values.
    """

    def __init__(self, extents):
        assert len(extents) == 7
        self.extents = list(extents)
        self.space = [reduce(mul, extents[:i]) for i in range(1, len(extents))]
        self.space.append(extents[-1] * self.space[3])

    def address_from_nid(self, nid):
        """Decompose a flat node id into a 7-component address.

        BUG FIX: uses floor division (``//``) throughout; the original
        Python-2 ``/`` yields floats under Python 3, breaking integer
        address components and the ``{0:05d}`` formatting in
        ``nodename_from_address``.
        """
        address = list(self.space)
        address[6] = nid // self.space[3]
        nid -= address[6] * self.space[3]
        address[3] = nid // self.space[2]
        nid -= address[3] * self.space[2]
        address[2] = nid // self.space[1]
        nid -= address[2] * self.space[1]
        address[1] = nid // self.space[0]
        nid -= address[1] * self.space[0]
        address[0] = nid % self.extents[0]
        address[5] = address[6] % self.extents[5]
        address[4] = address[6] // self.extents[4]
        return address

    def nid_from_address(self, address):
        """Inverse of ``address_from_nid``.

        A 7-component address carries the group index explicitly in the last
        slot; a shorter address derives it from components 4 and 5.
        Negative components wrap around their extent.
        """
        dist = lambda i, addr: addr[i] + (0 if addr[i] >= 0 else self.extents[i])
        if len(address) == len(self.extents):
            grp = address[-1]
        else:
            grp = address[5] * self.extents[5] + address[4]
        nid = dist(0, address)
        for i in 1, 2, 3:
            nid += dist(i, address) * self.space[i - 1]
        nid += grp * self.space[3]
        return nid

    def nodename_from_address(self, address):
        """Cray-style node name, e.g. ``nid00042``."""
        nid = self.nid_from_address(address)
        return 'nid{0:05d}'.format(nid)
stub class for testing
6259905abaa26c4b54d5087b
class LLG2:
    """Numerical integration of the Landau-Lifshitz-Gilbert equation.

    The effective field includes exchange, demagnetisation and external
    field terms.  Unlike the plain LLG class this uses CBC.Block-style
    mixed spaces and imposes the micromagnetic unit-norm constraint only on
    the nodes, via a Lagrange multiplier.
    """

    def __init__(self, mesh, material, **kwargs):
        self.mesh = mesh
        self.material = material
        self.scale = kwargs.pop('scale', 1.0)
        self.demag_order = kwargs.pop('demag_order', 2)
        # Mixed space: magnetisation (vector CG1) x multiplier (scalar CG1).
        self.VV = VectorFunctionSpace(self.mesh, "CG", 1)
        self.VS = FunctionSpace(self.mesh, "CG", 1)
        self.V = self.VV * self.VS
        self.demag_field = DemagField(self.mesh, order=self.demag_order)

    def interpolate(self, expr):
        """Interpolate ``expr`` into the magnetisation's vector space."""
        return interpolate(expr, self.VV)

    def calculate_dm(self, m, dt, **kwargs):
        """Solve the linearised saddle-point system for dm/dt at ``m``.

        Optional kwargs: ``u_demag`` (demag potential), ``h_ext``
        (external field).
        """
        u_demag = kwargs.pop('u_demag', None)
        h_ext = kwargs.pop('h_ext', None)
        (v, sigma) = TrialFunctions(self.V)
        (w, mu) = TestFunctions(self.V)
        # Exchange prefactor.
        f_ex = (-2.0 * self.material.Aex * self.material.gamma) / (
            Constants.mu0 * self.material.ms * self.scale ** 2)
        a = self.material.alpha * dot(v, w) * dx + dot(cross(m, v), w) * dx
        a += mu * inner(m, v) * dx + sigma * inner(m, w) * dx
        a += -0.5 * dt * f_ex * Dx(v[i], j) * Dx(w[i], j) * dx
        L = f_ex * Dx(m[i], j) * Dx(w[i], j) * dx
        if u_demag is not None:
            L += -self.material.ms * self.material.gamma * inner(grad(u_demag), w) * dx
        if h_ext is not None:
            L += self.material.gamma * inner(h_ext, w) * dx
        solution = Function(self.V)
        (A, b) = assemble_system(a, L)
        solve(A, solution.vector(), b, "bicgstab", "ilu")
        (v, sigma) = solution.split()
        return interpolate(v, self.VV)

    def step(self, m, dt, **kwargs):
        """Advance ``m`` by one explicit Euler step of size ``dt`` and
        return the nodewise-renormalised magnetisation."""
        h_ext = kwargs.pop('h_ext', None)
        u_demag = self.demag_field.calculate(m)
        dm = self.calculate_dm(m, dt, u_demag=u_demag, h_ext=h_ext)
        m.vector().axpy(dt, dm.vector())
        return Function(self.VV, DofAssembler.assemble(NormalizedVector(self.VV, m)))
This class defines methods for the numerical integration of the LLG. The effective field includes exchange, demagnetization and external field terms. As opposed to the LLG class here CBC.Block is used and the micromagnetic constraint is imposed only on the nodes.
6259905aa17c0f6771d5d68d
class WithFixtures(object):
    """Mixin that loads file-based fixtures for a test class and checks each
    ``fixture_<name>`` method against the precomputed outputs.

    Fixture files live in ``fixtures/<ClassNameWithoutTestPrefix>/`` next to
    this module and are named ``<func>.<key>``; each file holds the input and
    expected output separated by ``_FIXTURE_SEPARATOR``.
    """

    _FIXTURE_SEPARATOR = '=================================================='

    @classmethod
    def setup_class(cls):
        cls._fixtures = {}
        base_dir = path.abspath(path.dirname(__file__))
        # Strip the conventional "Test" prefix from the class name.
        cls_dir = cls.__name__[4:]
        cls._fixtures_dir = path.join(base_dir, 'fixtures', cls_dir)
        try:
            filenames = os.listdir(cls._fixtures_dir)
        except OSError:
            # No fixtures directory for this class: nothing to load.
            return
        for filename in filenames:
            if filename.startswith('.'):
                continue
            fullname = path.join(cls._fixtures_dir, filename)
            func, key = filename.split('.')
            if func not in cls._fixtures:
                cls._fixtures[func] = {}
            # BUG FIX: the original leaked the file handle (open().read()
            # with no close); use a context manager.
            with open(fullname) as fixture_file:
                input, output = fixture_file.read().split(
                    cls._FIXTURE_SEPARATOR)
            cls._fixtures[func][key] = input.strip(), output.strip()

    def test_fixtures(self):
        for func in self._fixtures:
            tester_func = 'fixture_' + func
            if not hasattr(self, tester_func):
                continue
            tester = getattr(self, tester_func)
            # BUG FIX: ``dict.iteritems`` is Python 2 only; ``items`` works
            # on both Python 2 and 3.
            for key, (input, output) in self._fixtures[func].items():
                result = tester(key, input)
                assert result == output, self._fixture_error(func, key, result)

    def _fixture_error(self, func, key, result):
        return (
            'Input not equals output in `{0}/{1}.{2}\'! '
            'Output must be:\n\n{3}'.format(
                self._fixtures_dir, func, key, result))
Preload class fixtures (if any) on class initialization and check with special fixture tester function does the computed result equal precomputed output in fixture. Example of location: tests/ test_serializers.py test_validators.py fixtures/ XML2Config/ # <- class name (without Test prefix) config2dict.1 # <- will be stored in cls._fixtures as config2dict.2 # {'config2dict': {'1': (i, o), '2': (i, o)}} Config2XML/ config2xml.1 # and so on... config2xml.2 config2xml.3 The example above will run :py:meth:`TestXML2Config.fixture_config2dict` with arguments '1', <input from fixture 1> and '2', <input from fixture 2> and compare the output of function and fixture output value. :py:meth:`TestConfig2XML.fixture_config2xml` will be run using the same schema.
6259905a2c8b7c6e89bd4dc5
class CocoValidationDataset(data.Dataset):
    """COCO validation dataset compatible with torch.utils.data.DataLoader.

    Each item is an image together with *all* of its tokenised reference
    captions (rather than one caption per item).
    """

    def __init__(self, root, json, vocab, transform=None):
        self.root = root
        self.vocab = vocab
        self.transform = transform
        self.coco = COCO(json)
        # Group annotation ids by image id so every image keeps all captions.
        captions = {}
        for ann_id in list(self.coco.anns.keys()):
            image_id = self.coco.anns[ann_id]['image_id']
            captions.setdefault(image_id, []).append(ann_id)
        self.ids = list(captions.keys())
        self.captions = captions

    def __getitem__(self, idx):
        """Return ``(image, captions)``; ``captions`` is a list of token-id
        lists, each wrapped in <start>/<end> markers."""
        coco = self.coco
        vocab = self.vocab
        img_id = self.ids[idx]
        path = coco.loadImgs(img_id)[0]['file_name']
        image = Image.open(os.path.join(self.root, path)).convert('RGB')
        if self.transform is not None:
            image = self.transform(image)
        captions = []
        for ann_id in self.captions[img_id]:
            raw = coco.anns[ann_id]['caption']
            tokens = nltk.tokenize.word_tokenize(str(raw).lower())
            encoded = [vocab('<start>')]
            encoded.extend(vocab(token) for token in tokens)
            encoded.append(vocab('<end>'))
            captions.append(encoded)
        return image, captions

    def __len__(self):
        return len(self.ids)
COCO Custom Dataset compatible with torch.utils.data.DataLoader.
6259905ab7558d5895464a17
class NameDict(dict):
    """Dictionary keyed by each stored object's ``name`` attribute.

    Constructed from an iterable of named objects; ``append`` adds one more
    object, refusing duplicates.
    """

    def __init__(self, iterable=[]):
        super(NameDict, self).__init__((item.name, item) for item in iterable)

    def append(self, obj):
        """Insert ``obj`` under ``obj.name``; raise KeyError on duplicates."""
        if obj.name in self:
            raise KeyError("%s already in dict" % obj.name)
        self[obj.name] = obj
A dictionary whose constructor accepts an iterable of objects that have a ``name`` attribute. These names become the keys in the dictionary and the objects become the values.
6259905a097d151d1a2c2644
class TestCreateSmtpTemplateSender(unittest.TestCase):
    """Unit-test stubs for CreateSmtpTemplateSender (no assertions yet)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testCreateSmtpTemplateSender(self):
        # TODO: construct a CreateSmtpTemplateSender and exercise it.
        pass
CreateSmtpTemplateSender unit test stubs
6259905a0a50d4780f7068aa
class drivetrain_csm(object):
    """Drivetrain power losses from the NREL cost and scaling model.

    ``compute`` fills ``self.power`` with the electrical power obtained from
    the aerodynamic power after efficiency losses; ``provideJ`` returns the
    Jacobian of that mapping with respect to aero and rated power.
    """

    # (constant, linear, quadratic) efficiency-loss coefficients per type.
    _LOSS_COEFFS = {
        'geared': (0.01289, 0.08510, 0.0),
        'single_stage': (0.01331, 0.03655, 0.06107),
        'multi_drive': (0.01547, 0.04463, 0.05790),
        'pm_direct_drive': (0.01007, 0.02000, 0.06899),
    }

    def __init__(self, drivetrain_type='geared'):
        # BUG FIX: an unknown type previously fell through the if/elif chain
        # and caused a NameError deep inside compute(); fail fast instead.
        if drivetrain_type not in self._LOSS_COEFFS:
            raise ValueError('unknown drivetrain_type: %r' % (drivetrain_type,))
        self.drivetrain_type = drivetrain_type
        # BUG FIX: the original assigned this buffer to a discarded local
        # ``power`` instead of the instance attribute.
        self.power = np.zeros(161)

    def compute(self, aero_power, aero_torque, aero_thrust, rated_power):
        constant, linear, quadratic = self._LOSS_COEFFS[self.drivetrain_type]
        Pbar0 = aero_power / rated_power
        # Smoothed |.| and min(., 1) keep the efficiency curve differentiable.
        Pbar1, dPbar1_dPbar0 = smooth_abs(Pbar0, dx=0.01)
        Pbar, dPbar_dPbar1, _ = smooth_min(Pbar1, 1.0, pct_offset=0.01)
        eff = 1.0 - (constant / Pbar + linear + quadratic * Pbar)
        self.power = aero_power * eff
        # BUG FIX: keep the intermediates provideJ needs; the original
        # referenced them there as (undefined) local names.
        self._jac_state = (aero_power, rated_power, constant, quadratic,
                           Pbar, dPbar_dPbar1, dPbar1_dPbar0, eff)

    def provideJ(self):
        """Jacobian of power w.r.t. (aero_power, rated_power); requires a
        prior call to compute()."""
        (aero_power, rated_power, constant, quadratic,
         Pbar, dPbar_dPbar1, dPbar1_dPbar0, eff) = self._jac_state
        dPbar_dPa = dPbar_dPbar1 * dPbar1_dPbar0 / rated_power
        dPbar_dPr = -dPbar_dPbar1 * dPbar1_dPbar0 * aero_power / rated_power ** 2
        deff_dPa = dPbar_dPa * (constant / Pbar ** 2 - quadratic)
        deff_dPr = dPbar_dPr * (constant / Pbar ** 2 - quadratic)
        dP_dPa = eff + aero_power * deff_dPa
        dP_dPr = aero_power * deff_dPr
        self.J = np.hstack([np.diag(dP_dPa), dP_dPr])
        return self.J
drivetrain losses from NREL cost and scaling model
6259905aa79ad1619776b5a9
class GetLatestFoodInputSet(InputSet):
    """Input parameters for the GetLatestFood Choreo.

    Used to supply OAuth credentials and query options before execution.
    """

    def set_AccessToken(self, value):
        """OAuth access token retrieved during authentication."""
        super(GetLatestFoodInputSet, self)._set_input('AccessToken', value)

    def set_AccessTokenSecret(self, value):
        """OAuth access token secret."""
        super(GetLatestFoodInputSet, self)._set_input('AccessTokenSecret', value)

    def set_ConsumerKey(self, value):
        """OAuth consumer key."""
        super(GetLatestFoodInputSet, self)._set_input('ConsumerKey', value)

    def set_ConsumerSecret(self, value):
        """OAuth consumer secret."""
        super(GetLatestFoodInputSet, self)._set_input('ConsumerSecret', value)

    def set_ResponseFormat(self, value):
        """Desired response format."""
        super(GetLatestFoodInputSet, self)._set_input('ResponseFormat', value)

    def set_UserID(self, value):
        """Id of the user whose food log is queried."""
        super(GetLatestFoodInputSet, self)._set_input('UserID', value)
An InputSet with methods appropriate for specifying the inputs to the GetLatestFood Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259905a2ae34c7f260ac6bf
class Triangle(PXPath):
    """Icon: an equilateral triangle with its apex at the origin, pointing
    right."""

    SIZE = 20  # side length

    def __init__(self):
        PXPath.__init__(self)
        x, y = 0, 0
        self.moveTo(x, y)
        x -= self.SIZE * math.cos(math.pi / 6)
        # BUG FIX: the original used ``math.PI`` here, which does not exist
        # (AttributeError at construction time); the constant is ``math.pi``,
        # as already used on the line above.
        y -= self.SIZE * math.sin(math.pi / 6)
        self.lineTo(x, y)
        y += self.SIZE
        self.lineTo(x, y)
        self.closePath()
        self.paint = NengoStyle.COLOR_LIGHT_PURPLE
Icon which is basically a right-facing equilateral triangle
6259905acc0a2c111447c5ba
class ViewListTest(ListsTest):
    """Functional test: a saved list stays viewable at its own URL from a
    fresh browser session."""

    def test_001(self):
        self.browser.get(self.live_server_url)
        self.add_list_item('买一些孔雀羽毛')
        self.add_list_item('用孔雀羽毛做假蝇')
        list_url = self.browser.current_url
        # Simulate a brand-new visit: new browser, same URL.
        self.quit_browser()
        self.init_browser()
        self.browser.get(list_url)
        for expected in ('买一些孔雀羽毛', '用孔雀羽毛做假蝇'):
            # Bind ``expected`` as a default to avoid late-binding closures.
            self.wait_for(lambda expected=expected: self.assertIn(
                expected,
                self.browser.find_element_by_id('id_list_table').text
            ))
Tests that a saved to-do list can be viewed again at its own URL from a fresh browser session.
6259905a63b5f9789fe8674a
class FilterWindowed(deeding.DeedLapse): <NEW_LINE> <INDENT> Ioinits = odict( group = 'filter.sensor.generic', output = 'state.generic', input = 'ctd', field = 'generic', depth = 'state.depth', parms = dict(window = 60.0, frac = 0.9, preload = 30.0, layer = 40.0, tolerance = 5.0)) <NEW_LINE> def __init__(self, **kw): <NEW_LINE> <INDENT> super(FilterWindowed,self).__init__(**kw) <NEW_LINE> <DEDENT> def _initio(self, group, output, input, field, depth, parms = None, **kw): <NEW_LINE> <INDENT> self.group = group <NEW_LINE> self.parm = self.store.create(group + '.parm') <NEW_LINE> if not parms: <NEW_LINE> <INDENT> parms = dict(window = 60.0, frac = 0.9, layer = 40.0, tolerance = 5.0, preload = 30.0) <NEW_LINE> <DEDENT> parms['window'] = abs(parms['window']) <NEW_LINE> parms['frac'] = max(0.0, min(1.0, parms['frac'])) <NEW_LINE> parms['tolerance'] = abs(parms['tolerance']) <NEW_LINE> self.parm.create(**parms) <NEW_LINE> self.elapsed = self.store.create(group + '.elapsed').create(value = 0.0) <NEW_LINE> preload = self.parm.data.preload <NEW_LINE> self.output = self.store.create(output).update(value = preload) <NEW_LINE> self.input = self.store.create(input) <NEW_LINE> self.field = field <NEW_LINE> self.input.create({self.field : 0.0}) <NEW_LINE> self.depth = self.store.create(depth).create(value = 0.0) <NEW_LINE> <DEDENT> def restart(self): <NEW_LINE> <INDENT> self.stamp = self.store.stamp <NEW_LINE> self.lapse = 0.0 <NEW_LINE> if self.output.stamp is None: <NEW_LINE> <INDENT> preload = self.parm.data.preload <NEW_LINE> self.output.value = preload <NEW_LINE> <DEDENT> <DEDENT> def action(self, **kw): <NEW_LINE> <INDENT> depth = self.depth.value <NEW_LINE> layer = self.parm.data.layer <NEW_LINE> tolerance = self.parm.data.tolerance <NEW_LINE> if (depth < (layer - tolerance)) or (depth > (layer + tolerance)): <NEW_LINE> <INDENT> preload = self.parm.data.preload <NEW_LINE> if self.output.value != preload: <NEW_LINE> <INDENT> self.output.value = preload <NEW_LINE> <DEDENT> 
return <NEW_LINE> <DEDENT> super(FilterWindowed,self).action(**kw) <NEW_LINE> self.elapsed.value = self.lapse <NEW_LINE> raw = self.input[self.field] <NEW_LINE> if self.lapse <= 0.0: <NEW_LINE> <INDENT> self.output.value = raw <NEW_LINE> return <NEW_LINE> <DEDENT> old = self.output.value <NEW_LINE> window = self.parm.data.window <NEW_LINE> frac = self.parm.data.frac <NEW_LINE> w = max( 1.0, window / self.lapse) <NEW_LINE> equiv = min(1.0, w * frac) <NEW_LINE> g = 1.0 - (1.0 - frac) ** (1.0/equiv) <NEW_LINE> new = (1.0 - g) * old + g * raw <NEW_LINE> self.output.value = new <NEW_LINE> <DEDENT> def _expose(self): <NEW_LINE> <INDENT> print("FilterWindowed %s stamp = %s lapse = %0.3f" % (self.name, self.stamp,self.lapse)) <NEW_LINE> format = "output = %0.3f window = %0.3f frac = %0.3f" <NEW_LINE> print(format % (self.output.value, self.parm.data.window, self.parm.data.frac))
Deed that applies windowed exponential smoothing to a sensor field, active only while the measured depth lies within ``tolerance`` of the configured ``layer``; outside that band the output is pinned to ``preload``.
6259905a379a373c97d9a5fc
class Attachment(db.Model):
    """Database model for an image attached to an issue."""

    __tablename__ = 'attachments'

    id = db.Column(db.Integer, primary_key=True)
    issue_id = db.Column(db.ForeignKey(u'issues.id'), index=True)
    image_url = db.Column(db.Text)
    issue = db.relationship(u'Issue')

    def get_thumbnail_url(self):
        """Path of the thumbnail derived from ``image_url``."""
        head, tail = os.path.split(self.image_url)
        return "{}/{}".format(head, "thumb-{}".format(tail))

    def delete(self):
        """Remove the row, its files, and the parent directory if emptied."""
        db.session.delete(self)
        db.session.commit()
        delete_file(self.image_url)
        delete_file(self.get_thumbnail_url())
        directory_path = os.path.abspath(os.path.join(
            current_app.config['MEDIA_FOLDER'], self.image_url, os.pardir))
        if os.path.exists(directory_path) and not os.listdir(directory_path):
            os.rmdir(directory_path)

    def get_full_thumbnail_url(self):
        """Thumbnail URL prefixed with MEDIA_URL, or the /media/ default."""
        url = self.get_thumbnail_url()
        prefix = current_app.config.get('MEDIA_URL') or '/media/'
        return prefix + url
Attachment table in the database.
6259905aa8ecb033258727ef
class GdalImage:
    """Thin wrapper around a GDAL dataset for reading bands and metadata."""

    def __init__(self, gdaldataset):
        self.dataset = gdaldataset

    def close(self):
        """Drop the dataset reference."""
        self.dataset = None

    def read_band(self, band_idx, subset=None):
        """Read one band (1-based index) as an array.

        ``subset`` is an optional (xoff, yoff, xsize, ysize) window; when
        omitted the whole band is read.  Raises IndexError when the band
        index is out of range.
        """
        if band_idx < 1 or band_idx > self.dataset.RasterCount:
            raise IndexError("band index is out of range")
        band = self.dataset.GetRasterBand(band_idx)
        if subset is None:
            return band.ReadAsArray(0, 0, band.XSize, band.YSize)
        return band.ReadAsArray(subset[0], subset[1], subset[2], subset[3])

    def XSize(self):
        return self.dataset.RasterXSize if self.dataset else None

    def YSize(self):
        return self.dataset.RasterYSize if self.dataset else None

    def get_raster_nodata_value(self, band_idx=1):
        if band_idx < 1 or band_idx > self.dataset.RasterCount:
            raise IndexError("band index is out of range")
        return self.dataset.GetRasterBand(band_idx).GetNoDataValue()

    def read_all_band(self):
        raise NotImplementedError("sorry, read_all_band has not been implemented!")

    def get_raster_dtype(self, band_idx=1):
        if band_idx < 1 or band_idx > self.dataset.RasterCount:
            raise IndexError("band index is out of range")
        return self.dataset.GetRasterBand(band_idx).DataType

    def geotransform(self):
        # BUG FIX: the original ``x and f() or None`` idiom returned None
        # whenever the real value was falsy (e.g. empty projection string,
        # zero band count); use explicit conditionals.
        return self.dataset.GetGeoTransform() if self.dataset else None

    def projection(self):
        return self.dataset.GetProjection() if self.dataset else None

    def band_count(self):
        return self.dataset.RasterCount if self.dataset else None

    def get_extent(self):
        """(min_x, min_y, max_x, max_y) computed from the geotransform,
        assuming a north-up image (negative y pixel size)."""
        geot = self.geotransform()
        return (geot[0], geot[3] + self.YSize() * geot[5],
                geot[0] + self.XSize() * geot[1], geot[3])
A sample class to access an image with the GDAL library
6259905ab57a9660fecd3053
class VoiceEqualityNumberOfNotesFeature(featuresModule.FeatureExtractor):
    """Feature T4 (extraction not yet implemented).

    TODO: implement the standard deviation of the total number of Note Ons
    in each channel that contains at least one note.
    """

    id = 'T4'

    def __init__(self, dataOrStream=None, *arguments, **keywords):
        super().__init__(dataOrStream=dataOrStream, *arguments, **keywords)
        self.name = 'Voice Equality - Number of Notes'
        self.description = ('Standard deviation of the total number of Note Ons '
                            + 'in each channel that contains at least one note.')
        self.isSequential = True
        self.dimensions = 1
Not implemented TODO: implement Standard deviation of the total number of Note Ons in each channel that contains at least one note.
6259905aac7a0e7691f73aba
class RBiomartr(RPackage): <NEW_LINE> <INDENT> homepage = "https://docs.ropensci.org/biomartr" <NEW_LINE> url = "https://cloud.r-project.org/src/contrib/biomartr_0.9.2.tar.gz" <NEW_LINE> list_url = "https://cloud.r-project.org/src/contrib/Archive/biomartr" <NEW_LINE> version('0.9.2', sha256='d88085696e9c5614828602254c33f2cdd3bbfeebc2f21a705eee3cb961097c89') <NEW_LINE> depends_on('r-biomart', type=('build', 'run')) <NEW_LINE> depends_on('r-biostrings', type=('build', 'run')) <NEW_LINE> depends_on('r-curl', type=('build', 'run')) <NEW_LINE> depends_on('r-tibble', type=('build', 'run')) <NEW_LINE> depends_on('r-jsonlite', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> depends_on('r-purrr', type=('build', 'run')) <NEW_LINE> depends_on('r-r-utils', type=('build', 'run')) <NEW_LINE> depends_on('r-philentropy', type=('build', 'run')) <NEW_LINE> depends_on('[email protected]:', type=('build', 'run'))
Perform large scale genomic data retrieval and functional annotation retrieval. This package aims to provide users with a standardized way to automate genome, proteome, 'RNA', coding sequence ('CDS'), 'GFF', and metagenome retrieval from 'NCBI RefSeq', 'NCBI Genbank', 'ENSEMBL', 'ENSEMBLGENOMES', and 'UniProt' databases. Furthermore, an interface to the 'BioMart' database (Smedley et al. (2009) <doi:10.1186/1471-2164-10-22>) allows users to retrieve functional annotation for genomic loci. In addition, users can download entire databases such as 'NCBI RefSeq' (Pruitt et al. (2007) <doi:10.1093/nar/gkl842>), 'NCBI nr', 'NCBI nt', 'NCBI Genbank' (Benson et al. (2013) <doi:10.1093/nar/gks1195>), etc. as well as 'ENSEMBL' and 'ENSEMBLGENOMES' with only one command.
6259905a99cbb53fe68324b8
class PlaneWall(Resistance): <NEW_LINE> <INDENT> def __init__(self, Material, Node1, Node2, L1, L2, A): <NEW_LINE> <INDENT> super().__init__(Material, Node1, Node2) <NEW_LINE> self.L1 = L1 <NEW_LINE> self.L2 = L2 <NEW_LINE> self.A = A <NEW_LINE> <DEDENT> @property <NEW_LINE> def resistance_value(self): <NEW_LINE> <INDENT> return (self.L2 - self.L1) / (self.Material.k * self.A)
Material,Node1,Node2,L1,L2,A
6259905a30dc7b76659a0d6c
class Actor(): <NEW_LINE> <INDENT> def __init__(self,state_size,action_size,action_low,action_high): <NEW_LINE> <INDENT> self.state_size = state_size <NEW_LINE> self.action_size = action_size <NEW_LINE> self.action_high = action_high <NEW_LINE> self.action_low = action_low <NEW_LINE> self.action_range = self.action_high-self.action_low <NEW_LINE> self.build_model() <NEW_LINE> <DEDENT> def build_model(self): <NEW_LINE> <INDENT> states = layers.Input(shape=(self.state_size,),name='states') <NEW_LINE> net = layers.Dense(units=400,kernel_regularizer=layers.regularizers.l2(1e-6))(states) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation("relu")(net) <NEW_LINE> net = layers.Dense(units=300,kernel_regularizer=layers.regularizers.l2(1e-6))(net) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation("relu")(net) <NEW_LINE> raw_actions = layers.Dense(units=self.action_size,activation='sigmoid', name='raw_actions',kernel_initializer=layers.initializers.RandomUniform(minval=-0.003, maxval=0.003))(net) <NEW_LINE> actions = layers.Lambda(lambda x: (x*self.action_range)+self.action_low, name='actions')(raw_actions) <NEW_LINE> self.model = models.Model(inputs=states,outputs=actions) <NEW_LINE> action_gradients = layers.Input(shape=(self.action_size,)) <NEW_LINE> loss = K.mean(-action_gradients*actions) <NEW_LINE> optimizer = optimizers.Adam(lr = 0.0001) <NEW_LINE> updates_op = optimizer.get_updates(params=self.model.trainable_weights, loss=loss) <NEW_LINE> self.train_fn = K.function( inputs=[self.model.input, action_gradients, K.learning_phase()], outputs=[], updates=updates_op)
Actor (Policy) Model
6259905ad99f1b3c44d06c79
class ValidationError(Exception): <NEW_LINE> <INDENT> pass
A generic validation error.
6259905a56b00c62f0fb3ea4
class django_disabled(uh.StaticHandler): <NEW_LINE> <INDENT> name = "django_disabled" <NEW_LINE> @classmethod <NEW_LINE> def identify(cls, hash): <NEW_LINE> <INDENT> if not hash: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if isinstance(hash, bytes): <NEW_LINE> <INDENT> return hash == b("!") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return hash == u"!" <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def genhash(cls, secret, config): <NEW_LINE> <INDENT> if secret is None: <NEW_LINE> <INDENT> raise TypeError("no secret provided") <NEW_LINE> <DEDENT> return to_hash_str(u"!") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def verify(cls, secret, hash): <NEW_LINE> <INDENT> if not cls.identify(hash): <NEW_LINE> <INDENT> raise ValueError("invalid django-disabled hash") <NEW_LINE> <DEDENT> return False
This class provides disabled password behavior for Django, and follows the :ref:`password-hash-api`. This class does not implement a hash, but instead claims the special hash string ``"!"`` which Django uses to indicate an account's password has been disabled. * newly encrypted passwords will hash to ``!``. * it rejects all passwords.
6259905a32920d7e50bc761f
class BsdfComponentExtension(bsdf.Extension): <NEW_LINE> <INDENT> name = 'flexx.app.component' <NEW_LINE> cls = BaseAppComponent <NEW_LINE> def match(self, s, c): <NEW_LINE> <INDENT> return isinstance(c, self.cls) <NEW_LINE> <DEDENT> def encode(self, s, c): <NEW_LINE> <INDENT> if isinstance(c, PyComponent): <NEW_LINE> <INDENT> c._ensure_proxy_instance() <NEW_LINE> <DEDENT> return dict(session_id=c._session.id, id=c._id) <NEW_LINE> <DEDENT> def decode(self, s, d): <NEW_LINE> <INDENT> c = None <NEW_LINE> session = manager.get_session_by_id(d['session_id']) <NEW_LINE> if session is None: <NEW_LINE> <INDENT> session = object() <NEW_LINE> session.id = d['session_id'] <NEW_LINE> c = StubComponent(session, d['id']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> c = session.get_component_instance(d['id']) <NEW_LINE> if c is None: <NEW_LINE> <INDENT> logger.warning('Using stub component for %s.' % d['id']) <NEW_LINE> c = StubComponent(session, d['id']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> session.keep_alive(c) <NEW_LINE> <DEDENT> <DEDENT> return c <NEW_LINE> <DEDENT> def match_js(self, s, c): <NEW_LINE> <INDENT> return isinstance(c, BaseAppComponent) <NEW_LINE> <DEDENT> def encode_js(self, s, c): <NEW_LINE> <INDENT> if isinstance(c, JsComponent): <NEW_LINE> <INDENT> c._ensure_proxy_instance() <NEW_LINE> <DEDENT> return dict(session_id=c._session.id, id=c._id) <NEW_LINE> <DEDENT> def decode_js(self, s, d): <NEW_LINE> <INDENT> c = None <NEW_LINE> session = window.flexx.sessions.get(d['session_id'], None) <NEW_LINE> if session is None: <NEW_LINE> <INDENT> session = dict(id=d['session_id']) <NEW_LINE> c = StubComponent(session, d['id']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> c = session.get_component_instance(d['id']) <NEW_LINE> if c is None: <NEW_LINE> <INDENT> logger.warning('Using stub component for %s.' % d['id']) <NEW_LINE> c = StubComponent(session, d['id']) <NEW_LINE> <DEDENT> <DEDENT> return c
A BSDF extension to encode flexx.app Component objects based on their session id and component id.
6259905a627d3e7fe0e08466
class FileSPIRVShader(PlaceHolder): <NEW_LINE> <INDENT> def __init__(self, source, suffix, assembly_substr=None): <NEW_LINE> <INDENT> assert isinstance(source, str) <NEW_LINE> assert isinstance(suffix, str) <NEW_LINE> self.source = source <NEW_LINE> self.suffix = suffix <NEW_LINE> self.filename = None <NEW_LINE> self.assembly_substr = assembly_substr <NEW_LINE> <DEDENT> def instantiate_for_spirv_args(self, testcase): <NEW_LINE> <INDENT> shader, asm_filename = tempfile.mkstemp( dir=testcase.directory, suffix=self.suffix) <NEW_LINE> shader_object = os.fdopen(shader, 'w') <NEW_LINE> shader_object.write(self.source) <NEW_LINE> shader_object.close() <NEW_LINE> self.filename = '%s.spv' % asm_filename <NEW_LINE> cmd = [ testcase.test_manager.assembler_path, asm_filename, '-o', self.filename ] <NEW_LINE> process = subprocess.Popen( args=cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=testcase.directory) <NEW_LINE> output = process.communicate() <NEW_LINE> assert process.returncode == 0 and not output[0] and not output[1] <NEW_LINE> return self.filename <NEW_LINE> <DEDENT> def instantiate_for_expectation(self, testcase): <NEW_LINE> <INDENT> assert self.filename is not None <NEW_LINE> return self.filename
Stands for a source shader file which must be converted to SPIR-V.
6259905a16aa5153ce401abc
class StatusView(ViewSet): <NEW_LINE> <INDENT> def list(self, request): <NEW_LINE> <INDENT> statuses = Status.objects.all() <NEW_LINE> serializer = StatusSerializer( statuses, many=True, context={'request': None}) <NEW_LINE> return Response(serializer.data)
Handle GET requests to list all statuses
6259905a16aa5153ce401abd
class SentPageMessageReply(LoginRequiredMixin, CreateView): <NEW_LINE> <INDENT> form_class = ReplyForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> particular_message = get_object_or_404(DefuseMessage, pk=self.kwargs['pk']) <NEW_LINE> receiver = get_object_or_404(User, username=particular_message.receiver.username) <NEW_LINE> quest = get_object_or_404(Quest, slug=particular_message.related_quest.slug) <NEW_LINE> self.object = form.save(commit=False) <NEW_LINE> self.object.sender = self.request.user <NEW_LINE> self.object.receiver = receiver <NEW_LINE> self.object.related_quest = quest <NEW_LINE> if particular_message.parent: <NEW_LINE> <INDENT> self.object.parent = particular_message.parent <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.object.parent = particular_message <NEW_LINE> <DEDENT> self.object.subject = "Re: " + particular_message.subject <NEW_LINE> self.object.save() <NEW_LINE> return super().form_valid(form) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> if 'pk' in self.kwargs: <NEW_LINE> <INDENT> pk = self.kwargs['pk'] <NEW_LINE> <DEDENT> return reverse_lazy('dashboard:message_detail', kwargs={'pk': pk})
Reply to a specific message
6259905a99cbb53fe68324b9
class ListPolicy(lister.Lister): <NEW_LINE> <INDENT> log = logging.getLogger(__name__ + '.ListPolicy') <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(ListPolicy, self).get_parser(prog_name) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> client = self.app.client_manager.congressclient <NEW_LINE> data = client.list_policy()['results'] <NEW_LINE> columns = ['id', 'name', 'owner_id', 'kind', 'description'] <NEW_LINE> formatters = {'Policies': utils.format_list} <NEW_LINE> return (columns, (utils.get_dict_properties(s, columns, formatters=formatters) for s in data))
List Policy.
6259905a6e29344779b01c26
class Block(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self, name, block_id, position): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._block_id = block_id <NEW_LINE> self._position = position <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def block_id(self): <NEW_LINE> <INDENT> return self._block_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def position(self): <NEW_LINE> <INDENT> return self._position <NEW_LINE> <DEDENT> @position.setter <NEW_LINE> def position(self, position): <NEW_LINE> <INDENT> self._position = position <NEW_LINE> <DEDENT> @abstractclassmethod <NEW_LINE> def display(self, gamer, data, dice_result): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractclassmethod <NEW_LINE> def change_value(self, rate): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getJSon(self): <NEW_LINE> <INDENT> json_data = { "name": self._name, "block_id": self._block_id, "position": self._position, } <NEW_LINE> return json_data
Abstract class: Block
6259905a097d151d1a2c2646
class PreciseTimer(Timer): <NEW_LINE> <INDENT> __slots__ = ('format_string',) <NEW_LINE> TIME_SENSITIVE = True <NEW_LINE> def __init__(self, format='Elapsed Time: %s'): <NEW_LINE> <INDENT> self.format_string = format <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def format_time(seconds): <NEW_LINE> <INDENT> return str(datetime.timedelta(seconds=seconds)) <NEW_LINE> <DEDENT> def update(self, pbar): <NEW_LINE> <INDENT> return self.format_string % self.format_time(pbar.seconds_elapsed)
Widget which displays the elapsed seconds.
6259905a07d97122c421827e
class Collectors(ListCollector): <NEW_LINE> <INDENT> def collect(self, component): <NEW_LINE> <INDENT> collectors = component.get_export('collectors', []) <NEW_LINE> for collector in collectors: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> collector = component.get_object(collector) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> logging.exception('Could not import collector: %s', collector) <NEW_LINE> continue <NEW_LINE> <DEDENT> hasmeth = functools.partial(hasmethod, collector) <NEW_LINE> if not all([hasmeth('install'), hasmeth('collectall')]): <NEW_LINE> <INDENT> logging.error('Invalid API for collector {}'.format(collector)) <NEW_LINE> continue <NEW_LINE> <DEDENT> self.register(collector) <NEW_LINE> <DEDENT> <DEDENT> def install_member(self, collector): <NEW_LINE> <INDENT> exts.exports.add_collector(collector)
This class handles member group collector exports. The member group collectors should be :py:class:`Collector` subclasses that manage individual groups.
6259905a07f4c71912bb0a14
class MessageAnnouncer: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.listeners = [] <NEW_LINE> <DEDENT> def listen(self): <NEW_LINE> <INDENT> q = queue.Queue(maxsize=5) <NEW_LINE> self.listeners.append(q) <NEW_LINE> return q <NEW_LINE> <DEDENT> def announce(self, msg): <NEW_LINE> <INDENT> for i in reversed(range(len(self.listeners))): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.listeners[i].put_nowait(msg) <NEW_LINE> <DEDENT> except queue.Full: <NEW_LINE> <INDENT> del self.listeners[i]
See https://maxhalford.github.io/blog/flask-sse-no-deps/
6259905ad486a94d0ba2d5a2
class PushChannel: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def factory(opts, **kwargs): <NEW_LINE> <INDENT> return SyncWrapper( AsyncPushChannel.factory, (opts,), kwargs, loop_kwarg="io_loop", ) <NEW_LINE> <DEDENT> def send(self, load, tries=3, timeout=60): <NEW_LINE> <INDENT> raise NotImplementedError()
Factory class to create Sync channel for push side of push/pull IPC
6259905a73bcbd0ca4bcb86d
class FileFinder(HTMLParser): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> HTMLParser.__init__(self) <NEW_LINE> self.fileLinks, self.dirLinks = [], [] <NEW_LINE> self.basePath = self._getParent(urlparse.urlparse(url).path) <NEW_LINE> if not self.basePath.endswith('/'): <NEW_LINE> <INDENT> self.basePath += '/' <NEW_LINE> <DEDENT> <DEDENT> def _getParent(self, path): <NEW_LINE> <INDENT> parent = '/'.join(path.split('/')[:-2]) <NEW_LINE> return parent if parent else '/' <NEW_LINE> <DEDENT> def handle_starttag(self, tag, attrs): <NEW_LINE> <INDENT> if tag == 'a': <NEW_LINE> <INDENT> attrs = dict(attrs) <NEW_LINE> link = attrs.get('href', None) <NEW_LINE> if link: <NEW_LINE> <INDENT> if link.endswith('/'): <NEW_LINE> <INDENT> if link != self.basePath: <NEW_LINE> <INDENT> self.dirLinks.append(link[:-1]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.fileLinks.append(link)
Parser for an Apache index page, gathering all links to files and subdirectories.
6259905a63b5f9789fe8674c
class SubmissionsTrelloClient(Trello): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> with app.app_context(): <NEW_LINE> <INDENT> super().__init__( api_key=app.config["TRELLO_API_KEY"], token=app.config["TRELLO_API_TOKEN"], ) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> @functools.lru_cache() <NEW_LINE> def board(self): <NEW_LINE> <INDENT> with app.app_context(): <NEW_LINE> <INDENT> return self.get_board(app.config["TRELLO_BOARD"]) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> @functools.lru_cache() <NEW_LINE> def new_submissions_list(self): <NEW_LINE> <INDENT> list_id = TrelloList().first(list_symbolic_name="NEW").list_id <NEW_LINE> return self.board.get_list(list_id) <NEW_LINE> <DEDENT> @property <NEW_LINE> @functools.lru_cache() <NEW_LINE> def labels(self): <NEW_LINE> <INDENT> with app.app_context(): <NEW_LINE> <INDENT> known_label_captions = { options["default_caption"]: common_name for label_group in app.config["DEFAULT_TRELLO_LABELS"].values() for common_name, options in label_group.items() } <NEW_LINE> <DEDENT> labels_by_common_name = {} <NEW_LINE> for trello_label in self.board.get_labels(): <NEW_LINE> <INDENT> common_name = known_label_captions.get(trello_label.name, None) <NEW_LINE> if common_name: <NEW_LINE> <INDENT> labels_by_common_name[common_name] = trello_label.id <NEW_LINE> <DEDENT> <DEDENT> return labels_by_common_name
Wrap the base Trello client with one that understands the business logic of the Submissions app.
6259905a76e4537e8c3f0b66
class DictItem(MutableMapping, BaseItem): <NEW_LINE> <INDENT> fields = {} <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._values = {} <NEW_LINE> if args or kwargs: <NEW_LINE> <INDENT> for k, v in six.iteritems(dict(*args, **kwargs)): <NEW_LINE> <INDENT> self[k] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self._values[key] <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if key in self.fields: <NEW_LINE> <INDENT> self._values[key] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise KeyError("{} does not set this field: {}".format(self.__class__.__name__, key)) <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self._values[key] <NEW_LINE> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> if item in self.fields: <NEW_LINE> <INDENT> raise AttributeError('Use item[{}] to get field value'.format(item)) <NEW_LINE> <DEDENT> raise AttributeError(item) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._values) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._values) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self._values.keys()
https://github.com/scrapy/scrapy/blob/master/scrapy/item.py
6259905a379a373c97d9a5fe
class TransRamHsW_addr(TransRamHsR_addr): <NEW_LINE> <INDENT> def _config(self): <NEW_LINE> <INDENT> TransRamHsR_addr._config(self) <NEW_LINE> self.USE_FLUSH = Param(True) <NEW_LINE> <DEDENT> def _declr(self): <NEW_LINE> <INDENT> TransRamHsR_addr._declr(self) <NEW_LINE> if(self.USE_FLUSH == True): <NEW_LINE> <INDENT> self.flush = Signal() <NEW_LINE> <DEDENT> <DEDENT> def _initSimAgent(self, sim:HdlSimulator): <NEW_LINE> <INDENT> self._ag = UniversalHandshakedAgent(sim, self)
.. hwt-autodoc::
6259905a8e7ae83300eea667
class ipaddr(nla_base_string): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> sql_type = 'TEXT' <NEW_LINE> def ft_encode(self, offset): <NEW_LINE> <INDENT> if self.value.find(':') > -1: <NEW_LINE> <INDENT> family = AF_INET6 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> family = AF_INET <NEW_LINE> <DEDENT> self['value'] = inet_pton(family, self.value) <NEW_LINE> return nla_base_string.ft_encode(self, offset) <NEW_LINE> <DEDENT> def ft_decode(self, offset): <NEW_LINE> <INDENT> nla_base_string.ft_decode(self, offset) <NEW_LINE> if self.length > 8: <NEW_LINE> <INDENT> family = AF_INET6 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> family = AF_INET <NEW_LINE> <DEDENT> self.value = inet_ntop(family, self['value'])
This class is used to decode IP addresses according to the family. Socket library currently supports only two families, AF_INET and AF_INET6. We do not specify here the string size, it will be calculated in runtime.
6259905afff4ab517ebcedfe
class HTTPNotFound(HTTPError): <NEW_LINE> <INDENT> def __init__(self, title='Not found', description=None): <NEW_LINE> <INDENT> super(HTTPNotFound, self).__init__(nfw.HTTP_404, title, description)
404 Not Found.
6259905a0c0af96317c5784c
class MSRulerData(object): <NEW_LINE> <INDENT> def addGuideWithValue(self, value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def numberOfGuides(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def guideAtIndex(self, index): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def removeGuideAtIndex(self, index): <NEW_LINE> <INDENT> pass
Stores the guides used on its ruler. MSPage and MSArtboardGroup both return their ruler data using horizontalRulerData and verticalRulerData.
6259905a3539df3088ecd876
class SiteDef: <NEW_LINE> <INDENT> def __init__(self, n, ss=[]): <NEW_LINE> <INDENT> self.name = n <NEW_LINE> self.state_list = ss <NEW_LINE> <DEDENT> def write_as_bngl(self): <NEW_LINE> <INDENT> if self.state_list: <NEW_LINE> <INDENT> return "%s~%s" % (self.name, '~'.join(self.state_list)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> <DEDENT> def write_as_kappa(self): <NEW_LINE> <INDENT> if self.state_list: <NEW_LINE> <INDENT> return "%s{%s}" % (self.name, ','.join(self.state_list)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if not self.state_list: <NEW_LINE> <INDENT> return "SiteDef(%s)" % self.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "SiteDef(%s: %s)" % (self.name, ','.join(self.state_list))
A site definition composed of a name and a finite set of states
6259905ab5575c28eb7137b9
class Meta(object): <NEW_LINE> <INDENT> model = models.Template
Django instruction to link the form to UserGroup model.
6259905a097d151d1a2c2647
class LineageFetcher(object): <NEW_LINE> <INDENT> def __init__(self, db=None, cursor=None): <NEW_LINE> <INDENT> self._db = db or getDatabaseConnection() <NEW_LINE> self._cursor = cursor or self._db.cursor() <NEW_LINE> self._cache = {} <NEW_LINE> <DEDENT> def lineage(self, title): <NEW_LINE> <INDENT> if title in self._cache: <NEW_LINE> <INDENT> return self._cache[title] <NEW_LINE> <DEDENT> lineage = [] <NEW_LINE> gi = int(title.split('|')[1]) <NEW_LINE> query = 'SELECT taxID FROM gi_taxid WHERE gi = %d' % gi <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self._cursor.execute(query) <NEW_LINE> taxID = self._cursor.fetchone()[0] <NEW_LINE> if taxID == 1: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> query = 'SELECT name FROM names WHERE taxId = %s' % taxID <NEW_LINE> self._cursor.execute(query) <NEW_LINE> scientificName = self._cursor.fetchone()[0] <NEW_LINE> lineage.append((taxID, scientificName)) <NEW_LINE> query = ('SELECT parent_taxID FROM nodes WHERE taxID = %s' % taxID) <NEW_LINE> <DEDENT> <DEDENT> except TypeError: <NEW_LINE> <INDENT> lineage = [] <NEW_LINE> <DEDENT> self._cache[title] = lineage <NEW_LINE> return lineage <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._cursor.close() <NEW_LINE> self._db.close() <NEW_LINE> self._cursor = self._db = self._cache = None
Provide access to the NCBI taxonomy database so we can retrieve the lineage of title sequences hit by BLAST.
6259905a009cb60464d02b0f
class Ec2Instance(object): <NEW_LINE> <INDENT> def __init__(self, instance_id, instance_ip, tags): <NEW_LINE> <INDENT> self.instance_id = instance_id <NEW_LINE> self.instance_ip = instance_ip <NEW_LINE> self.tags = {} <NEW_LINE> for tag in tags: <NEW_LINE> <INDENT> self.tags[tag['Key']] = tag['Value'] <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s, %s, %s" % (self.instance_id, self.instance_ip, repr(self.tags)) <NEW_LINE> <DEDENT> def has_app(self, app): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> appstr = self.tags['Apps'] <NEW_LINE> applist = appstr.split(',') <NEW_LINE> return app in applist <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False
Contains configuration info for an EC2 instance, including the instance ID, IP address and tags
6259905a30dc7b76659a0d6d
class WordsegBuild(distutils.command.build.build): <NEW_LINE> <INDENT> targets = ['dpseg'] <NEW_LINE> def run(self): <NEW_LINE> <INDENT> distutils.command.build.build.run(self) <NEW_LINE> if on_readthedocs(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for target in self.targets: <NEW_LINE> <INDENT> build_dir = os.path.join('build', target) <NEW_LINE> print('compiling C++ dependencies for', target, 'in', build_dir) <NEW_LINE> if not os.path.exists(build_dir): <NEW_LINE> <INDENT> os.makedirs(build_dir) <NEW_LINE> <DEDENT> env = os.environ.copy() <NEW_LINE> env['BUILDDIR'] = os.path.abspath(build_dir) <NEW_LINE> subprocess.call( ['make'], env=env, cwd=os.path.join('wordseg', 'algos', target)) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def bin_targets(cls): <NEW_LINE> <INDENT> return ([] if on_readthedocs() else ['build/{}/{}'.format(t, t) for t in cls.targets]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def dpseg_config_files(cls): <NEW_LINE> <INDENT> config_dir = os.path.join('wordseg', 'algos', 'dpseg', 'config') <NEW_LINE> return [os.path.join(config_dir, f) for f in os.listdir(config_dir)]
Compile the C++ code needed by wordseg
6259905a2ae34c7f260ac6c2
class HostPrepCommand(TemareCommand): <NEW_LINE> <INDENT> def __init__(self, base): <NEW_LINE> <INDENT> TemareCommand.__init__(self, base) <NEW_LINE> self.failed = 0 <NEW_LINE> self.names = ['hostprep'] <NEW_LINE> self.usage = 'HOSTNAME...' <NEW_LINE> self.summary = 'Prepare and start testruns on the specified hosts' <NEW_LINE> self.description = ' HOSTNAME Name of the host' <NEW_LINE> <DEDENT> def do_command(self, args): <NEW_LINE> <INDENT> hostlist = [] <NEW_LINE> threads = [] <NEW_LINE> environment = '' <NEW_LINE> getenv = '(grep -q "^kvm " /proc/modules && echo "kvm") || ' '(/usr/sbin/xend status >/dev/null 2>&1 && echo "xen") || ' 'echo "bare"' <NEW_LINE> if len(args) == 0: <NEW_LINE> <INDENT> raise ValueError('No arguments given.') <NEW_LINE> <DEDENT> sys.stdout.write( 'Starting to prepare hosts. ' 'Please wait, this may take a while...\n') <NEW_LINE> for host in args: <NEW_LINE> <INDENT> host = chk_hostname(host) <NEW_LINE> if host not in hostlist: <NEW_LINE> <INDENT> hostlist.append(host) <NEW_LINE> process = Popen(['/usr/bin/ssh', '-o PasswordAuthentication=no', 'root@%s' % (host, ), getenv], stderr=None, stdout=PIPE) <NEW_LINE> retval = process.wait() <NEW_LINE> if retval == 0: <NEW_LINE> <INDENT> output = process.communicate()[0].strip().split('\n') <NEW_LINE> if len(output) == 1 and output[0] in ('xen', 'kvm'): <NEW_LINE> <INDENT> environment = output[0] <NEW_LINE> <DEDENT> <DEDENT> if environment == 'xen': <NEW_LINE> <INDENT> threads.append(preparation.XenHostPreparation(self, host)) <NEW_LINE> <DEDENT> elif environment == 'kvm': <NEW_LINE> <INDENT> threads.append(preparation.KvmHostPreparation(self, host)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.failed = 1 <NEW_LINE> sys.stderr.write( 'Preparation of host %s failed\n' 'Reason:\n' 'Could not determine the test environment.\n' % (host, )) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for thread in threads: <NEW_LINE> <INDENT> thread.start() <NEW_LINE> <DEDENT> for thread in threads: <NEW_LINE> 
<INDENT> thread.join() <NEW_LINE> <DEDENT> if self.failed == 1: <NEW_LINE> <INDENT> raise ValueError('Preparation of some hosts failed.')
Output guest configurations for a new test run on a given host
6259905a0fa83653e46f64c0
class AzureFirewallApplicationRuleCollection(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'priority': {'maximum': 65000, 'minimum': 100}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'priority': {'key': 'properties.priority', 'type': 'int'}, 'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'}, 'rules': {'key': 'properties.rules', 'type': '[AzureFirewallApplicationRule]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(AzureFirewallApplicationRuleCollection, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = None <NEW_LINE> self.priority = kwargs.get('priority', None) <NEW_LINE> self.action = kwargs.get('action', None) <NEW_LINE> self.rules = kwargs.get('rules', None) <NEW_LINE> self.provisioning_state = None
Application rule collection resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :param priority: Priority of the application rule collection resource. :type priority: int :param action: The action type of a rule collection. :type action: ~azure.mgmt.network.v2019_02_01.models.AzureFirewallRCAction :param rules: Collection of rules used by a application rule collection. :type rules: list[~azure.mgmt.network.v2019_02_01.models.AzureFirewallApplicationRule] :ivar provisioning_state: The provisioning state of the resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2019_02_01.models.ProvisioningState
6259905a32920d7e50bc7621
class TestSwiftTelemetry(manager.SwiftScenarioTest): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def resource_setup(cls): <NEW_LINE> <INDENT> if not CONF.service_available.ceilometer: <NEW_LINE> <INDENT> skip_msg = ("%s skipped as ceilometer is not available" % cls.__name__) <NEW_LINE> raise cls.skipException(skip_msg) <NEW_LINE> <DEDENT> elif CONF.telemetry.too_slow_to_test: <NEW_LINE> <INDENT> skip_msg = "Ceilometer feature for fast work mysql is disabled" <NEW_LINE> raise cls.skipException(skip_msg) <NEW_LINE> <DEDENT> super(TestSwiftTelemetry, cls).resource_setup() <NEW_LINE> cls.telemetry_client = cls.manager.telemetry_client <NEW_LINE> <DEDENT> def _confirm_notifications(self, container_name, obj_name): <NEW_LINE> <INDENT> def _check_samples(): <NEW_LINE> <INDENT> _, results = self.telemetry_client.list_samples( 'storage.api.request') <NEW_LINE> LOG.debug('got samples %s', results) <NEW_LINE> containers = [sample['resource_metadata']['container'] for sample in results if sample['resource_metadata']['container'] != 'None'] <NEW_LINE> objects = [sample['resource_metadata']['object'] for sample in results if sample['resource_metadata']['object'] != 'None'] <NEW_LINE> return (containers and objects and container_name in containers and obj_name in objects) <NEW_LINE> <DEDENT> self.assertTrue(test.call_until_true(_check_samples, NOTIFICATIONS_WAIT, NOTIFICATIONS_SLEEP), 'Correct notifications were not received after ' '%s seconds.' % NOTIFICATIONS_WAIT) <NEW_LINE> <DEDENT> @test.services('object_storage', 'telemetry') <NEW_LINE> def test_swift_middleware_notifies(self): <NEW_LINE> <INDENT> container_name = self.create_container() <NEW_LINE> obj_name, _ = self.upload_object_to_container(container_name) <NEW_LINE> self._confirm_notifications(container_name, obj_name)
Test that swift uses the ceilometer middleware. * create container. * upload a file to the created container. * retrieve the file from the created container. * wait for notifications from ceilometer.
6259905a004d5f362081fadb
class MainPage(Page): <NEW_LINE> <INDENT> login_assert_text_loc = (By.CLASS_NAME, "alert-success") <NEW_LINE> def assert_login_text(self): <NEW_LINE> <INDENT> return self.find_element(self.login_assert_text_loc).text
主页
6259905a16aa5153ce401abf
class TitleInfo: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.value = '' <NEW_LINE> <DEDENT> def to_mods_element(self, parent_element): <NEW_LINE> <INDENT> top_level = ET.SubElement(parent_element, 'titleInfo') <NEW_LINE> return top_level
Holds identifier information
6259905aa219f33f346c7de0
class UserManager(BaseUserManager): <NEW_LINE> <INDENT> use_in_migrations = True <NEW_LINE> def _create_user(self, username, password, is_staff, **extra_fields): <NEW_LINE> <INDENT> now = timezone.now() <NEW_LINE> if not username: <NEW_LINE> <INDENT> raise ValueError('The given username must be set') <NEW_LINE> <DEDENT> user = self.model( username=username, is_staff=is_staff, is_active=True, date_joined=now, **extra_fields ) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_user(self, username, password=None, **extra_fields): <NEW_LINE> <INDENT> return self._create_user(username, password, False, **extra_fields) <NEW_LINE> <DEDENT> def create_superuser(self, username, password, **extra_fields): <NEW_LINE> <INDENT> return self._create_user(username, password, True, **extra_fields)
Email is required to create users by default, and the implementation here removes the email
6259905a4428ac0f6e659b17
class EncryptError(AnidbApiError): <NEW_LINE> <INDENT> pass
Encryption error, encrypted session cannot be established.
6259905a99cbb53fe68324bb
class ErrorProperties(object): <NEW_LINE> <INDENT> openapi_types = { 'type': 'str', 'message': 'str', 'fields': 'dict(str, object)' } <NEW_LINE> attribute_map = { 'type': 'type', 'message': 'message', 'fields': 'fields' } <NEW_LINE> def __init__(self, type=None, message=None, fields=None): <NEW_LINE> <INDENT> self._type = None <NEW_LINE> self._message = None <NEW_LINE> self._fields = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> if message is not None: <NEW_LINE> <INDENT> self.message = message <NEW_LINE> <DEDENT> if fields is not None: <NEW_LINE> <INDENT> self.fields = fields <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def message(self): <NEW_LINE> <INDENT> return self._message <NEW_LINE> <DEDENT> @message.setter <NEW_LINE> def message(self, message): <NEW_LINE> <INDENT> self._message = message <NEW_LINE> <DEDENT> @property <NEW_LINE> def fields(self): <NEW_LINE> <INDENT> return self._fields <NEW_LINE> <DEDENT> @fields.setter <NEW_LINE> def fields(self, fields): <NEW_LINE> <INDENT> self._fields = fields <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = 
value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ErrorProperties): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259905a07d97122c4218280
class GifConvertOptions(ImageConvertOptions): <NEW_LINE> <INDENT> swagger_types = { } <NEW_LINE> attribute_map = { } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> base = super(GifConvertOptions, self) <NEW_LINE> base.__init__(**kwargs) <NEW_LINE> self.swagger_types.update(base.swagger_types) <NEW_LINE> self.attribute_map.update(base.attribute_map) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, GifConvertOptions): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Gif convert options
6259905a3cc13d1c6d466d1b
class GhostController: <NEW_LINE> <INDENT> def __init__(self, maxlen: int = 10): <NEW_LINE> <INDENT> self._queue: Deque[int] = deque(maxlen=maxlen) <NEW_LINE> self._learning_team: int = -1 <NEW_LINE> self._ghost_trainers: Dict[int, GhostTrainer] = {} <NEW_LINE> self._changed_training_team = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def get_learning_team(self) -> int: <NEW_LINE> <INDENT> return self._learning_team <NEW_LINE> <DEDENT> def should_reset(self) -> bool: <NEW_LINE> <INDENT> changed_team = self._changed_training_team <NEW_LINE> if self._changed_training_team: <NEW_LINE> <INDENT> self._changed_training_team = False <NEW_LINE> <DEDENT> return changed_team <NEW_LINE> <DEDENT> def subscribe_team_id(self, team_id: int, trainer: GhostTrainer) -> None: <NEW_LINE> <INDENT> if team_id not in self._ghost_trainers: <NEW_LINE> <INDENT> self._ghost_trainers[team_id] = trainer <NEW_LINE> if self._learning_team < 0: <NEW_LINE> <INDENT> self._learning_team = team_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._queue.append(team_id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def change_training_team(self, step: int) -> None: <NEW_LINE> <INDENT> self._queue.append(self._learning_team) <NEW_LINE> self._learning_team = self._queue.popleft() <NEW_LINE> logger.debug( "Learning team {} swapped on step {}".format(self._learning_team, step) ) <NEW_LINE> self._changed_training_team = True <NEW_LINE> <DEDENT> def compute_elo_rating_changes(self, rating: float, result: float) -> float: <NEW_LINE> <INDENT> opponent_rating: float = 0.0 <NEW_LINE> for team_id, trainer in self._ghost_trainers.items(): <NEW_LINE> <INDENT> if team_id != self._learning_team: <NEW_LINE> <INDENT> opponent_rating = trainer.get_opponent_elo() <NEW_LINE> <DEDENT> <DEDENT> r1 = pow(10, rating / 400) <NEW_LINE> r2 = pow(10, opponent_rating / 400) <NEW_LINE> summed = r1 + r2 <NEW_LINE> e1 = r1 / summed <NEW_LINE> change = result - e1 <NEW_LINE> for team_id, trainer in self._ghost_trainers.items(): <NEW_LINE> 
<INDENT> if team_id != self._learning_team: <NEW_LINE> <INDENT> trainer.change_opponent_elo(change) <NEW_LINE> <DEDENT> <DEDENT> return change
GhostController contains a queue of team ids. GhostTrainers subscribe to the GhostController and query it to get the current learning team. The GhostController cycles through team ids every 'swap_interval' which corresponds to the number of trainer steps between changing learning teams. The GhostController is a unique object and there can only be one per training run.
6259905abaa26c4b54d50880
class DetectorDataset(utils.Dataset): <NEW_LINE> <INDENT> def __init__(self, image_fps, image_annotations, orig_height, orig_width): <NEW_LINE> <INDENT> super().__init__(self) <NEW_LINE> self.add_class('pneumonia', 1, 'Lung Opacity') <NEW_LINE> for i, fp in enumerate(image_fps): <NEW_LINE> <INDENT> annotations = image_annotations[fp] <NEW_LINE> self.add_image('pneumonia', image_id=i, path=fp, annotations=annotations, orig_height=orig_height, orig_width=orig_width) <NEW_LINE> <DEDENT> <DEDENT> def image_reference(self, image_id): <NEW_LINE> <INDENT> info = self.image_info[image_id] <NEW_LINE> return info['path'] <NEW_LINE> <DEDENT> def load_image(self, image_id): <NEW_LINE> <INDENT> info = self.image_info[image_id] <NEW_LINE> fp = info['path'] <NEW_LINE> ds = pydicom.read_file(fp) <NEW_LINE> image = ds.pixel_array <NEW_LINE> if len(image.shape) != 3 or image.shape[2] != 3: <NEW_LINE> <INDENT> image = np.stack((image,) * 3, -1) <NEW_LINE> <DEDENT> return image <NEW_LINE> <DEDENT> def load_mask(self, image_id): <NEW_LINE> <INDENT> info = self.image_info[image_id] <NEW_LINE> annotations = info['annotations'] <NEW_LINE> count = len(annotations) <NEW_LINE> if count == 0: <NEW_LINE> <INDENT> mask = np.zeros((info['orig_height'], info['orig_width'], 1), dtype=np.uint8) <NEW_LINE> class_ids = np.zeros((1,), dtype=np.int32) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask = np.zeros((info['orig_height'], info['orig_width'], count), dtype=np.uint8) <NEW_LINE> class_ids = np.zeros((count,), dtype=np.int32) <NEW_LINE> for i, a in enumerate(annotations): <NEW_LINE> <INDENT> if a['Target'] == 1: <NEW_LINE> <INDENT> x = int(a['x']) <NEW_LINE> y = int(a['y']) <NEW_LINE> w = int(a['width']) <NEW_LINE> h = int(a['height']) <NEW_LINE> mask_instance = mask[:, :, i].copy() <NEW_LINE> cv2.rectangle(mask_instance, (x, y), (x+w, y+h), 255, -1) <NEW_LINE> mask[:, :, i] = mask_instance <NEW_LINE> class_ids[i] = 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return mask.astype(np.bool), 
class_ids.astype(np.int32)
Dataset class for training pneumonia detection on the RSNA pneumonia dataset.
6259905a7b25080760ed87cd
class ScanTask(QThread): <NEW_LINE> <INDENT> stepComplete = Signal(int) <NEW_LINE> def __init__(self, sipa, fipa, rdir, ignoreUnk=True, includeLocalhost=False, parent=None): <NEW_LINE> <INDENT> self.sipa = sipa <NEW_LINE> self.fipa = fipa <NEW_LINE> self.rdir = rdir <NEW_LINE> self.ignoreUnk = ignoreUnk <NEW_LINE> self.includeLocalhost = includeLocalhost <NEW_LINE> self.timetaken = 0 <NEW_LINE> self.steps = util.addrtolong(fipa) - util.addrtolong(sipa) + 1 <NEW_LINE> self.steps = (self.steps + 1) if self.includeLocalhost else self.steps <NEW_LINE> super(ScanTask, self).__init__(parent) <NEW_LINE> self.__stop = False <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> qtp = QThreadPool() <NEW_LINE> t1 = time.time() <NEW_LINE> for ipa in util.genrange(self.sipa, self.fipa): <NEW_LINE> <INDENT> if self.__stop: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> tsk = PingTask.PingTask(ipa, self.rdir, self.ignoreUnk, self.parent()) <NEW_LINE> tsk.complete.connect(self.subTaskComplete) <NEW_LINE> while not qtp.tryStart(tsk): <NEW_LINE> <INDENT> time.sleep(0.5) <NEW_LINE> <DEDENT> <DEDENT> if self.includeLocalhost: <NEW_LINE> <INDENT> tsk = PingTask.PingTask('127.0.0.1', self.rdir, self.ignoreUnk) <NEW_LINE> tsk.complete.connect(self.subTaskComplete) <NEW_LINE> while not qtp.tryStart(tsk): <NEW_LINE> <INDENT> time.sleep(0.5) <NEW_LINE> <DEDENT> <DEDENT> qtp.waitForDone() <NEW_LINE> t2 = time.time() <NEW_LINE> self.timetaken = t2-t1 <NEW_LINE> <DEDENT> def subTaskComplete(self): <NEW_LINE> <INDENT> self.stepComplete.emit(1) <NEW_LINE> <DEDENT> def makeResultsDir(self, qdir): <NEW_LINE> <INDENT> rdir = QDir( qdir.filePath(time.strftime("%Y-%m-%d_%H-%M-%S")) ) <NEW_LINE> if not rdir.exists(): <NEW_LINE> <INDENT> rdir.mkdir(rdir.absolutePath()) <NEW_LINE> <DEDENT> return rdir <NEW_LINE> <DEDENT> def stopScan(self): <NEW_LINE> <INDENT> self.__stop = True
QThread to manage a scan of multiple addresses using a QThreadPool. Because QTP's waitForDone method blocks, it cannot be run in the main thread. Hence, it is outsourced to this thread! str sipa: start IP address for the scan str fipa: finish IP address for the scan QDir dir: where to put the result directory QObject parent: the parent for this thread
6259905a29b78933be26abb2
class PooremptyError(Exception): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Exception.__init__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr("poor is empty")
ip池获取不到ip的错误
6259905a07f4c71912bb0a17
class AdSiteDiscoverer(DaoDiscoverer): <NEW_LINE> <INDENT> def _discover(self): <NEW_LINE> <INDENT> result = AdDiscoveryResult() <NEW_LINE> subnetDtoToOshMap = result.getSubnetDtoToOshMap() <NEW_LINE> siteDtoToOshMap = result.getMap() <NEW_LINE> siteDao = self._daoService.getSiteDao() <NEW_LINE> client = self._daoService.getClient() <NEW_LINE> credentialsId = client.getCredentialId() <NEW_LINE> for siteDto in siteDao.obtainSites(): <NEW_LINE> <INDENT> osh = self.createOsh(siteDto, self._containerOsh, credentialsId) <NEW_LINE> siteDtoToOshMap[siteDto] = osh <NEW_LINE> for subnetDto in siteDto.networkDtos: <NEW_LINE> <INDENT> subnetDtoToOshMap[subnetDto] = osh <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def createOsh(self, siteDto, containerOsh, credentialId=None): <NEW_LINE> <INDENT> osh = modeling.createActiveDirectoryOsh('activedirectorysite', siteDto.name) <NEW_LINE> osh.setContainer(containerOsh) <NEW_LINE> if credentialId is not None: <NEW_LINE> <INDENT> osh.setAttribute('credentials_id', credentialId) <NEW_LINE> <DEDENT> return osh
This class represents discoverer of Active Directory site objects
6259905a3eb6a72ae038bc3b
class GeoFilter(Model): <NEW_LINE> <INDENT> _validation = { 'relative_path': {'required': True}, 'action': {'required': True}, 'country_codes': {'required': True}, } <NEW_LINE> _attribute_map = { 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'action': {'key': 'action', 'type': 'GeoFilterActions'}, 'country_codes': {'key': 'countryCodes', 'type': '[str]'}, } <NEW_LINE> def __init__(self, relative_path, action, country_codes): <NEW_LINE> <INDENT> self.relative_path = relative_path <NEW_LINE> self.action = action <NEW_LINE> self.country_codes = country_codes
Rules defining user geo access within a CDN endpoint. :param relative_path: Relative path applicable to geo filter. (e.g. '/mypictures', '/mypicture/kitty.jpg', and etc.) :type relative_path: str :param action: Action of the geo filter, i.e. allow or block access. Possible values include: 'Block', 'Allow' :type action: str or :class:`GeoFilterActions <azure.mgmt.cdn.models.GeoFilterActions>` :param country_codes: Two letter country codes defining user country access in a geo filter, e.g. AU, MX, US. :type country_codes: list of str
6259905a379a373c97d9a600
class SKIP_POST_VALIDATION(Validator): <NEW_LINE> <INDENT> def __init__(self, other=None): <NEW_LINE> <INDENT> if other and isinstance(other, (list, tuple)): <NEW_LINE> <INDENT> other = other[0] <NEW_LINE> <DEDENT> self.other = other <NEW_LINE> if other: <NEW_LINE> <INDENT> if hasattr(other, "multiple"): <NEW_LINE> <INDENT> self.multiple = other.multiple <NEW_LINE> <DEDENT> if hasattr(other, "options"): <NEW_LINE> <INDENT> self.options = other.options <NEW_LINE> <DEDENT> if hasattr(other, "formatter"): <NEW_LINE> <INDENT> self.formatter = other.formatter <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __call__(self, value, record_id=None): <NEW_LINE> <INDENT> other = self.other <NEW_LINE> if current.request.env.request_method == "POST" or not other: <NEW_LINE> <INDENT> return value, None <NEW_LINE> <DEDENT> if not isinstance(other, (list, tuple)): <NEW_LINE> <INDENT> other = [other] <NEW_LINE> <DEDENT> for r in other: <NEW_LINE> <INDENT> value, error = r(value) <NEW_LINE> if error: <NEW_LINE> <INDENT> return value, error <NEW_LINE> <DEDENT> <DEDENT> return value, None
Pseudo-validator that allows introspection of field options during GET, but does nothing during POST. Used for Ajax-validated inline-components to prevent them from throwing validation errors when the outer form gets submitted.
6259905a498bea3a75a590ea
class TmpSgpr: <NEW_LINE> <INDENT> def __init__(self, regPool, num, align, tag=None): <NEW_LINE> <INDENT> self.regPool = regPool <NEW_LINE> self.regIdx = regPool.checkOutAligned(num, align, tag=tag, preventOverflow=False) <NEW_LINE> <DEDENT> def idx(self): <NEW_LINE> <INDENT> return self.regIdx <NEW_LINE> <DEDENT> def __int__(self): <NEW_LINE> <INDENT> return self.idx() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.regPool.checkIn(self.regIdx)
A temporary register which is automatically returned to sgpr pool when class is destroyed.
6259905ad6c5a102081e36fd
class SingleRequestTransport(xmlrpclib.Transport): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if 'timeout' in kwargs: <NEW_LINE> <INDENT> self.timeout = kwargs['timeout'] <NEW_LINE> del kwargs['timeout'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.timeout = sslutils.SOCKET_DEFAULT_TIMEOUT <NEW_LINE> <DEDENT> xmlrpclib.Transport.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> def make_connection(self, host): <NEW_LINE> <INDENT> self._connection = None <NEW_LINE> httpCon = xmlrpclib.Transport.make_connection(self, host) <NEW_LINE> httpCon.timeout = self.timeout <NEW_LINE> return httpCon
Python 2.7 Transport introduced a change that makes it reuse connections by default when new connections are requested for a host with an existing connection. This class reverts the change to avoid the concurrency issues.
6259905ae64d504609df9ebd
class GIFRequest(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.type = None <NEW_LINE> self.config = None <NEW_LINE> self.x_forwarded_for = None <NEW_LINE> self.user_agent = None <NEW_LINE> self.__Q = Q() <NEW_LINE> if isinstance(config, Config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> <DEDENT> <DEDENT> def build_http_request(self): <NEW_LINE> <INDENT> params = self.build_parameters() <NEW_LINE> query_string = urllib.urlencode(params.get_parameters()) <NEW_LINE> query_string = query_string.replace('+', '%20') <NEW_LINE> query_string = utils.convert_to_uri_component_encoding(query_string) <NEW_LINE> use_post = len(query_string) > 2036 <NEW_LINE> if not use_post: <NEW_LINE> <INDENT> url = '%s?%s' % (self.config.endpoint, query_string) <NEW_LINE> post = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = self.config.endpoint <NEW_LINE> post = query_string <NEW_LINE> <DEDENT> headers = {} <NEW_LINE> headers['Host'] = self.config.endpoint.split('/')[2] <NEW_LINE> headers['User-Agent'] = self.user_agent <NEW_LINE> headers['X-Forwarded-For'] = self.x_forwarded_for and self.x_forwarded_for or '' <NEW_LINE> if use_post: <NEW_LINE> <INDENT> headers['Content-Type'] = 'text/plain' <NEW_LINE> headers['Content-Length'] = len(query_string) <NEW_LINE> <DEDENT> logger.debug(url) <NEW_LINE> if post: <NEW_LINE> <INDENT> logger.debug(post) <NEW_LINE> <DEDENT> return urllib2.Request(url, post, headers) <NEW_LINE> <DEDENT> def build_parameters(self): <NEW_LINE> <INDENT> return Parameters() <NEW_LINE> <DEDENT> def __send(self): <NEW_LINE> <INDENT> request = self.build_http_request() <NEW_LINE> response = None <NEW_LINE> if self.config.endpoint: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = urllib2.urlopen(request, timeout=self.config.request_timeout) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> response = urllib2.urlopen(request) <NEW_LINE> <DEDENT> <DEDENT> return response <NEW_LINE> <DEDENT> def fire(self): 
<NEW_LINE> <INDENT> if self.config.queue_requests: <NEW_LINE> <INDENT> self.__Q.add_wrapped_request((lambda: self.__send())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__send()
Properties: type -- Indicates the type of request, will be mapped to "utmt" parameter config -- base.Config object x_forwarded_for -- user_agent -- User Agent String
6259905acb5e8a47e493cc74
class DifferentSamplerateError(InvalidSamplerateError): <NEW_LINE> <INDENT> def __init__(self, frequencies: typing.Tuple[float, ...]) -> None: <NEW_LINE> <INDENT> ValueError.__init__( self, 'all samplerates must be the same value but got {}'.format( ' and '.join(str(x) for x in set(frequencies)) ) ) <NEW_LINE> self.frequency = frequencies
The exception that raises when different samplerates of sounds to joint :var frequency: List of passed frequencies.
6259905ab57a9660fecd3057
class RTOpenParens(RTAtom): <NEW_LINE> <INDENT> def __init__(self, src='(', container=None): <NEW_LINE> <INDENT> super().__init__(src, container) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<RTOpenParens %r>' % self.src
A simple open parenthesis Atom with a sensible default >>> romanText.rtObjects.RTOpenParens('(') <RTOpenParens '('>
6259905ab5575c28eb7137ba